/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
  18. #include "config.h"
  19. #include "libavutil/avassert.h"
  20. #include "libavutil/imgutils.h"
  21. #include "libavutil/hwcontext.h"
  22. #if CONFIG_D3D11VA
  23. #include "libavutil/hwcontext_d3d11va.h"
  24. #endif
  25. #include "libavutil/mem.h"
  26. #include "libavutil/pixdesc.h"
  27. #include "libavutil/time.h"
  28. #include "amfenc.h"
  29. #include "internal.h"
  30. #if CONFIG_D3D11VA
  31. #include <d3d11.h>
  32. #endif
  33. #ifdef _WIN32
  34. #include "compat/w32dlfcn.h"
  35. #else
  36. #include <dlfcn.h>
  37. #endif
  38. #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
  39. #define PTS_PROP L"PtsProp"
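
// Pixel formats accepted on input by the AMF encoders; AV_PIX_FMT_D3D11 is
// only offered when FFmpeg is built with D3D11VA support.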
const enum AVPixelFormat ff_amf_pix_fmts[] = {
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_YUV420P,
#if CONFIG_D3D11VA
    AV_PIX_FMT_D3D11,
#endif
    AV_PIX_FMT_NONE
};

typedef struct FormatMap {
    enum AVPixelFormat       av_format;
    enum AMF_SURFACE_FORMAT  amf_format;
} FormatMap;

static const FormatMap format_map[] =
{
    { AV_PIX_FMT_NONE,       AMF_SURFACE_UNKNOWN },
    { AV_PIX_FMT_NV12,       AMF_SURFACE_NV12 },
    { AV_PIX_FMT_BGR0,       AMF_SURFACE_BGRA },
    { AV_PIX_FMT_RGB0,       AMF_SURFACE_RGBA },
    { AV_PIX_FMT_GRAY8,      AMF_SURFACE_GRAY8 },
    { AV_PIX_FMT_YUV420P,    AMF_SURFACE_YUV420P },
    { AV_PIX_FMT_YUYV422,    AMF_SURFACE_YUY2 },
    { AV_PIX_FMT_D3D11,      AMF_SURFACE_NV12 },
};

static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
{
    int i;
    for (i = 0; i < amf_countof(format_map); i++) {
        if (format_map[i].av_format == fmt) {
            return format_map[i].amf_format;
        }
    }
    return AMF_SURFACE_UNKNOWN;
}

static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
    const wchar_t *scope, const wchar_t *message)
{
    AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
    av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
}

static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
{
}

static AMFTraceWriterVtbl tracer_vtbl =
{
    .Write = AMFTraceWriter_Write,
    .Flush = AMFTraceWriter_Flush,
};
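
// Loads the AMF runtime (AMF_DLL_NAMEA), resolves the init and version entry
// points, queries the runtime version and obtains the factory, trace and
// debug interfaces. Also allocates the delayed frame and the pts FIFO used
// later for dts computation.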
static int amf_load_library(AVCodecContext *avctx)
{
    AmfContext        *ctx = avctx->priv_data;
    AMFInit_Fn         init_fun = NULL;
    AMFQueryVersion_Fn version_fun = NULL;
    AMF_RESULT         res = AMF_OK;

    ctx->eof = 0;
    ctx->delayed_drain = 0;
    ctx->hw_frames_ctx = NULL;
    ctx->hw_device_ctx = NULL;
    ctx->delayed_surface = NULL;
    ctx->delayed_frame = av_frame_alloc();
    if (!ctx->delayed_frame) {
        return AVERROR(ENOMEM);
    }
    // hardcoded to current HW queue size - will realloc in timestamp_queue_enqueue() if too small
    ctx->timestamp_list = av_fifo_alloc((avctx->max_b_frames + 16) * sizeof(int64_t));
    if (!ctx->timestamp_list) {
        return AVERROR(ENOMEM);
    }
    ctx->dts_delay = 0;

    ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
    AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
        AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);

    init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
    AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);

    version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
    AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);

    res = version_fun(&ctx->version);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
    res = init_fun(AMF_FULL_VERSION, &ctx->factory);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
    res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
    res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
    return 0;
}

#if CONFIG_D3D11VA
static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
{
    AmfContext *ctx = avctx->priv_data;
    AMF_RESULT res;

    res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);
    if (res != AMF_OK) {
        if (res == AMF_NOT_SUPPORTED)
            av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
        else
            av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
        return AVERROR(ENODEV);
    }
    return 0;
}
#endif
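
// Sets up AMF tracing (routed into av_log through the writer above) and
// initialises the AMF context: from the frames or device context passed by
// the caller if present, otherwise standalone via D3D11 with a D3D9 fallback.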
static int amf_init_context(AVCodecContext *avctx)
{
    AmfContext *ctx = avctx->priv_data;
    AMF_RESULT  res;
    av_unused int ret;

    ctx->hwsurfaces_in_queue = 0;
    ctx->hwsurfaces_in_queue_max = 16;

    // configure AMF logger
    // the return values of these calls indicate the previous state and do not affect behaviour
    ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0);
    if (ctx->log_to_dbg)
        ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
    ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
    ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);

    // connect AMF logger to av_log
    ctx->tracer.vtbl = &tracer_vtbl;
    ctx->tracer.avctx = avctx;
    ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID, (AMFTraceWriter*)&ctx->tracer, 1);
    ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);

    res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);

    // If a device was passed to the encoder, try to initialise from that.
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;

        if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
            av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
                   av_get_pix_fmt_name(frames_ctx->sw_format));
            return AVERROR(EINVAL);
        }

        switch (frames_ctx->device_ctx->type) {
#if CONFIG_D3D11VA
        case AV_HWDEVICE_TYPE_D3D11VA:
            ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
            if (ret < 0)
                return ret;
            break;
#endif
        default:
            av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
                   av_hwdevice_get_type_name(frames_ctx->device_ctx->type));
            return AVERROR(ENOSYS);
        }

        ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
        if (!ctx->hw_frames_ctx)
            return AVERROR(ENOMEM);

        if (frames_ctx->initial_pool_size > 0)
            ctx->hwsurfaces_in_queue_max = frames_ctx->initial_pool_size - 1;

    } else if (avctx->hw_device_ctx) {
        AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;

        switch (device_ctx->type) {
#if CONFIG_D3D11VA
        case AV_HWDEVICE_TYPE_D3D11VA:
            ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
            if (ret < 0)
                return ret;
            break;
#endif
        default:
            av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
                   av_hwdevice_get_type_name(device_ctx->type));
            return AVERROR(ENOSYS);
        }

        ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
        if (!ctx->hw_device_ctx)
            return AVERROR(ENOMEM);

    } else {
        res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
        if (res == AMF_OK) {
            av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
        } else {
            res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
            if (res == AMF_OK) {
                av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
            } else {
                av_log(avctx, AV_LOG_ERROR, "AMF initialisation failed via D3D9: error %d.\n", res);
                return AVERROR(ENOSYS);
            }
        }
    }
    return 0;
}
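
// Creates the encoder component (AVC or HEVC) for the configured codec and
// checks that the input pixel format maps to an AMF surface format.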
static int amf_init_encoder(AVCodecContext *avctx)
{
    AmfContext    *ctx = avctx->priv_data;
    const wchar_t *codec_id = NULL;
    AMF_RESULT     res = AMF_OK;

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        codec_id = AMFVideoEncoderVCE_AVC;
        break;
    case AV_CODEC_ID_HEVC:
        codec_id = AMFVideoEncoder_HEVC;
        break;
    default:
        break;
    }
    AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);

    ctx->format = amf_av_to_amf_format(avctx->pix_fmt);
    AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL), "Format %d is not supported\n", avctx->pix_fmt);

    res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);

    return 0;
}
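
// Releases every AMF object in reverse order of creation and unloads the
// runtime library; each pointer is checked first, so this is safe to call
// on a partially initialised context.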
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
{
    AmfContext *ctx = avctx->priv_data;

    if (ctx->delayed_surface) {
        ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
        ctx->delayed_surface = NULL;
    }

    if (ctx->encoder) {
        ctx->encoder->pVtbl->Terminate(ctx->encoder);
        ctx->encoder->pVtbl->Release(ctx->encoder);
        ctx->encoder = NULL;
    }

    if (ctx->context) {
        ctx->context->pVtbl->Terminate(ctx->context);
        ctx->context->pVtbl->Release(ctx->context);
        ctx->context = NULL;
    }
    av_buffer_unref(&ctx->hw_device_ctx);
    av_buffer_unref(&ctx->hw_frames_ctx);

    if (ctx->trace) {
        ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
    }
    if (ctx->library) {
        dlclose(ctx->library);
        ctx->library = NULL;
    }
    ctx->trace = NULL;
    ctx->debug = NULL;
    ctx->factory = NULL;
    ctx->version = 0;
    ctx->delayed_drain = 0;
    av_frame_free(&ctx->delayed_frame);
    av_fifo_freep(&ctx->timestamp_list);

    return 0;
}
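
// Copies the planes of a software frame into a host-memory AMF surface,
// honouring the pitch reported by AMF for each plane.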
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
                            AMFSurface* surface)
{
    AMFPlane *plane = NULL;
    uint8_t  *dst_data[4];
    int       dst_linesize[4];
    int       planes;
    int       i;

    planes = surface->pVtbl->GetPlanesCount(surface);
    av_assert0(planes < FF_ARRAY_ELEMS(dst_data));

    for (i = 0; i < planes; i++) {
        plane = surface->pVtbl->GetPlaneAt(surface, i);
        dst_data[i] = plane->pVtbl->GetNative(plane);
        dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
    }
    av_image_copy(dst_data, dst_linesize,
                  (const uint8_t**)frame->data, frame->linesize, frame->format,
                  avctx->width, avctx->height);

    return 0;
}
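
// Stores a submitted pts in the timestamp FIFO, growing the FIFO on demand.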
static inline int timestamp_queue_enqueue(AVCodecContext *avctx, int64_t timestamp)
{
    AmfContext *ctx = avctx->priv_data;

    if (av_fifo_space(ctx->timestamp_list) < sizeof(timestamp)) {
        if (av_fifo_grow(ctx->timestamp_list, sizeof(timestamp)) < 0) {
            return AVERROR(ENOMEM);
        }
    }
    av_fifo_generic_write(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
    return 0;
}
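
// Copies an encoded AMF buffer into an AVPacket: sets the keyframe flag for
// IDR output, restores the original pts from PTS_PROP and derives dts from
// the timestamp FIFO. When B-frames are enabled, dts_delay is measured once
// as the distance between the oldest and newest queued timestamps, keeping
// dts behind pts. Illustrative numbers (an assumption about the encoder's
// reordering depth, not taken from this file): if frames with pts 0, 1, 2
// are queued when the first packet appears, dts_delay becomes 2 and the
// emitted dts sequence starts at -2.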
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
{
    AmfContext      *ctx = avctx->priv_data;
    int              ret;
    AMFVariantStruct var = {0};
    int64_t          timestamp = AV_NOPTS_VALUE;
    int64_t          size = buffer->pVtbl->GetSize(buffer);

    if ((ret = ff_alloc_packet2(avctx, pkt, size, 0)) < 0) {
        return ret;
    }
    memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);

    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
        if (var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
            pkt->flags = AV_PKT_FLAG_KEY;
        }
        break;
    case AV_CODEC_ID_HEVC:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
        if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
            pkt->flags = AV_PKT_FLAG_KEY;
        }
        break;
    default:
        break;
    }

    buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
    pkt->pts = var.int64Value; // original pts

    AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN, "timestamp_list is empty\n");

    av_fifo_generic_read(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);

    // calc dts shift if max_b_frames > 0
    if (avctx->max_b_frames > 0 && ctx->dts_delay == 0) {
        int64_t timestamp_last = AV_NOPTS_VALUE;
        AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN,
            "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
        av_fifo_generic_peek_at(
            ctx->timestamp_list,
            &timestamp_last,
            (av_fifo_size(ctx->timestamp_list) / sizeof(timestamp) - 1) * sizeof(timestamp_last),
            sizeof(timestamp_last),
            NULL);
        if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
            return AVERROR(ERANGE);
        }
        ctx->dts_delay = timestamp_last - timestamp;
    }
    pkt->dts = timestamp - ctx->dts_delay;
    return 0;
}

// amfenc API implementation
int ff_amf_encode_init(AVCodecContext *avctx)
{
    AmfContext *ctx = avctx->priv_data;
    int         ret;

    ctx->factory = NULL;
    ctx->debug = NULL;
    ctx->trace = NULL;
    ctx->context = NULL;
    ctx->encoder = NULL;
    ctx->library = NULL;
    ctx->version = 0;
    ctx->eof = 0;
    ctx->format = 0;
    ctx->tracer.vtbl = NULL;
    ctx->tracer.avctx = NULL;

    if ((ret = amf_load_library(avctx)) == 0) {
        if ((ret = amf_init_context(avctx)) == 0) {
            if ((ret = amf_init_encoder(avctx)) == 0) {
                return 0;
            }
        }
    }
    ff_amf_encode_close(avctx);
    return ret;
}
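
// Attaches an AMFBuffer to a surface as an AMFInterface-typed property, so
// the buffer travels with the surface through the encoder.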
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
{
    AMF_RESULT res;
    AMFVariantStruct var;

    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        AMFGuid guid_AMFInterface = IID_AMFInterface();
        AMFInterface *amf_interface;
        res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);

        if (res == AMF_OK) {
            res = AMFVariantAssignInterface(&var, amf_interface);
            amf_interface->pVtbl->Release(amf_interface);
        }
        if (res == AMF_OK) {
            res = object->pVtbl->SetProperty(object, name, var);
        }
        AMFVariantClear(&var);
    }
    return res;
}

static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
{
    AMF_RESULT res;
    AMFVariantStruct var;

    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        res = object->pVtbl->GetProperty(object, name, &var);
        if (res == AMF_OK) {
            if (var.type == AMF_VARIANT_INTERFACE) {
                AMFGuid guid_AMFBuffer = IID_AMFBuffer();
                AMFInterface *amf_interface = AMFVariantInterface(&var);
                res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
            } else {
                res = AMF_INVALID_DATA_TYPE;
            }
        }
        AMFVariantClear(&var);
    }
    return res;
}
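
// Wraps a cloned AVFrame reference in a host AMF buffer. Attached to a HW
// surface, this keeps the underlying D3D11 frame alive until the encoder has
// consumed it; amf_release_buffer_with_frame_ref() undoes it on output.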
static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
{
    AVFrame *frame_ref;
    AMFBuffer *frame_ref_storage_buffer = NULL;
    AMF_RESULT res;

    res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
    if (res == AMF_OK) {
        frame_ref = av_frame_clone(frame);
        if (frame_ref) {
            memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
        } else {
            frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
            frame_ref_storage_buffer = NULL;
        }
    }
    return frame_ref_storage_buffer;
}

static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
{
    AVFrame *av_frame_ref;
    memcpy(&av_frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(av_frame_ref));
    av_frame_free(&av_frame_ref);
    frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
}
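
// Submission half of the send/receive API: a NULL frame requests draining
// (submitted once), otherwise the frame is wrapped (D3D11) or copied (host)
// into an AMF surface and passed to SubmitInput(). On AMF_INPUT_FULL the
// surface or drain request is parked and resubmitted from
// ff_amf_receive_packet(); while a surface is parked, further sends return
// EAGAIN.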
int ff_amf_send_frame(AVCodecContext *avctx, const AVFrame *frame)
{
    AMF_RESULT  res = AMF_OK;
    AmfContext *ctx = avctx->priv_data;
    AMFSurface *surface = NULL;
    int         ret;

    if (!ctx->encoder)
        return AVERROR(EINVAL);

    if (!frame) { // submit drain
        if (!ctx->eof) { // submit drain one time only
            if (ctx->delayed_surface != NULL) {
                ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
            } else if (!ctx->delayed_drain) {
                res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                if (res == AMF_INPUT_FULL) {
                    ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
                } else {
                    if (res == AMF_OK) {
                        ctx->eof = 1; // drain started
                    }
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
                }
            }
        } else {
            return AVERROR_EOF;
        }
    } else { // submit frame
        int hw_surface = 0;

        if (ctx->delayed_surface != NULL) {
            return AVERROR(EAGAIN); // should not happen when called from ffmpeg, other clients may resubmit
        }
        // prepare surface from frame
        switch (frame->format) {
#if CONFIG_D3D11VA
        case AV_PIX_FMT_D3D11:
            {
                static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
                ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
                int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - tells AMF which slice to use

                av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
                           frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);

                texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);

                res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
                hw_surface = 1;
            }
            break;
#endif
        default:
            {
                res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
                amf_copy_surface(avctx, frame, surface);
            }
            break;
        }

        if (hw_surface) {
            AMFBuffer *frame_ref_storage_buffer;

            // input HW surfaces can be vertically aligned by 16; tell AMF the real size
            surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);

            frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
            AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");

            res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
            ctx->hwsurfaces_in_queue++;
            frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
        }

        surface->pVtbl->SetPts(surface, frame->pts);
        AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);

        switch (avctx->codec->id) {
        case AV_CODEC_ID_H264:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
            break;
        case AV_CODEC_ID_HEVC:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
            break;
        default:
            break;
        }

        // submit surface
        res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
        if (res == AMF_INPUT_FULL) { // handle full queue
            // store surface for later submission
            ctx->delayed_surface = surface;
            if (surface->pVtbl->GetMemoryType(surface) == AMF_MEMORY_DX11) {
                av_frame_ref(ctx->delayed_frame, frame);
            }
        } else {
            surface->pVtbl->Release(surface);
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);

            if ((ret = timestamp_queue_enqueue(avctx, frame->pts)) < 0) {
                return ret;
            }
        }
    }
    return 0;
}
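
// Retrieval half of the API: polls QueryOutput(), converts the returned
// buffer into a packet, releases any attached frame reference, then retries
// a parked surface or drain. It keeps polling (1 ms sleep) while a
// resubmission is pending, while draining, or while too many HW surfaces are
// queued. Returns 0 on a packet, AVERROR(EAGAIN) if nothing is ready, and
// AVERROR_EOF once draining completes.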
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
{
    int             ret;
    AMF_RESULT      res;
    AMF_RESULT      res_query;
    AmfContext     *ctx = avctx->priv_data;
    AMFData        *data = NULL;
    int             block_and_wait;

    if (!ctx->encoder)
        return AVERROR(EINVAL);

    do {
        block_and_wait = 0;
        // poll data
        res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
        if (data) {
            // copy data to packet
            AMFBuffer *buffer;
            AMFGuid guid = IID_AMFBuffer();
            data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
            ret = amf_copy_buffer(avctx, avpkt, buffer);

            buffer->pVtbl->Release(buffer);

            if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
                AMFBuffer *frame_ref_storage_buffer;
                res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
                amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
                ctx->hwsurfaces_in_queue--;
            }

            data->pVtbl->Release(data);

            AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);

            if (ctx->delayed_surface != NULL) { // try to resubmit frame
                res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
                if (res != AMF_INPUT_FULL) {
                    int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
                    ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
                    ctx->delayed_surface = NULL;
                    av_frame_unref(ctx->delayed_frame);
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res);

                    if ((ret = timestamp_queue_enqueue(avctx, pts)) < 0) {
                        return ret;
                    }
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed frame submission got AMF_INPUT_FULL - should not happen\n");
                }
            } else if (ctx->delayed_drain) { // try to resubmit drain
                res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                if (res != AMF_INPUT_FULL) {
                    ctx->delayed_drain = 0;
                    ctx->eof = 1; // drain started
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL - should not happen\n");
                }
            }
        } else if (ctx->delayed_surface != NULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
            block_and_wait = 1;
            av_usleep(1000); // wait and poll again
        }
    } while (block_and_wait);

    if (res_query == AMF_EOF) {
        ret = AVERROR_EOF;
    } else if (data == NULL) {
        ret = AVERROR(EAGAIN);
    } else {
        ret = 0;
    }
    return ret;
}
  615. }