You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

593 lines
19KB

  1. /*
  2. * Intel MediaSDK QSV encoder utility functions
  3. *
  4. * copyright (c) 2013 Yukinori Yamazoe
  5. * copyright (c) 2015 Anton Khirnov
  6. *
  7. * This file is part of Libav.
  8. *
  9. * Libav is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * Libav is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with Libav; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <string.h>
  24. #include <sys/types.h>
  25. #include <mfx/mfxvideo.h>
  26. #include "libavutil/common.h"
  27. #include "libavutil/mem.h"
  28. #include "libavutil/log.h"
  29. #include "libavutil/time.h"
  30. #include "libavutil/imgutils.h"
  31. #include "avcodec.h"
  32. #include "internal.h"
  33. #include "qsv.h"
  34. #include "qsv_internal.h"
  35. #include "qsvenc.h"
/* Fill q->param.mfx from the AVCodecContext settings and the private
 * options stored in the QSVEncContext.  Returns 0 on success or
 * AVERROR_BUG if the codec id has no libmfx equivalent. */
static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
{
    const char *ratecontrol_desc;
    float quant;
    int ret;

    /* translate the libavcodec codec id into the libmfx codec id */
    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    /* HEVC needs 32-aligned widths, the other codecs 16-aligned */
    q->width_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;

    q->param.mfx.CodecProfile       = q->profile;
    q->param.mfx.TargetUsage        = q->preset;
    q->param.mfx.GopPicSize         = FFMAX(0, avctx->gop_size);
    /* distance between consecutive I/P frames: number of B-frames + 1 */
    q->param.mfx.GopRefDist         = FFMAX(-1, avctx->max_b_frames) + 1;
    q->param.mfx.GopOptFlag         = avctx->flags & AV_CODEC_FLAG_CLOSED_GOP ?
                                      MFX_GOP_CLOSED : 0;
    q->param.mfx.IdrInterval        = q->idr_interval;
    q->param.mfx.NumSlice           = avctx->slices;
    q->param.mfx.NumRefFrame        = FFMAX(0, avctx->refs);
    q->param.mfx.EncodedOrder       = 0;
    /* 0 lets libmfx pick the bitstream buffer size itself; the chosen
     * value is read back in qsv_retrieve_enc_params() */
    q->param.mfx.BufferSizeInKB     = 0;

    q->param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
    /* the dimensions handed to libmfx must be aligned; the real visible
     * size is conveyed through the crop fields below */
    q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, q->width_align);
    q->param.mfx.FrameInfo.Height         = FFALIGN(avctx->height, 32);
    q->param.mfx.FrameInfo.CropX          = 0;
    q->param.mfx.FrameInfo.CropY          = 0;
    q->param.mfx.FrameInfo.CropW          = avctx->width;
    q->param.mfx.FrameInfo.CropH          = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma   = 8;
    q->param.mfx.FrameInfo.BitDepthChroma = 8;

    /* prefer an explicitly set framerate; otherwise fall back to the
     * inverse of the time base */
    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
    }

    /* pick the ratecontrol mode from the user-supplied codec settings */
    if (avctx->flags & AV_CODEC_FLAG_QSCALE) {
        q->param.mfx.RateControlMethod = MFX_RATECONTROL_CQP;
        ratecontrol_desc = "constant quantization parameter (CQP)";
    } else if (avctx->rc_max_rate == avctx->bit_rate) {
        q->param.mfx.RateControlMethod = MFX_RATECONTROL_CBR;
        ratecontrol_desc = "constant bitrate (CBR)";
    } else if (!avctx->rc_max_rate) {
        q->param.mfx.RateControlMethod = MFX_RATECONTROL_AVBR;
        ratecontrol_desc = "average variable bitrate (AVBR)";
    } else {
        q->param.mfx.RateControlMethod = MFX_RATECONTROL_VBR;
        ratecontrol_desc = "variable bitrate (VBR)";
    }
    av_log(avctx, AV_LOG_VERBOSE, "Using the %s ratecontrol method\n", ratecontrol_desc);

    switch (q->param.mfx.RateControlMethod) {
    case MFX_RATECONTROL_CBR:
    case MFX_RATECONTROL_VBR:
        q->param.mfx.InitialDelayInKB = avctx->rc_initial_buffer_occupancy / 1000;
        q->param.mfx.TargetKbps       = avctx->bit_rate / 1000;
        q->param.mfx.MaxKbps          = avctx->bit_rate / 1000;
        break;
    case MFX_RATECONTROL_CQP:
        /* derive per-frame-type QPs from the lambda-scaled global_quality,
         * clipped to the [0, 51] QP range */
        quant = avctx->global_quality / FF_QP2LAMBDA;

        q->param.mfx.QPI = av_clip(quant * fabs(avctx->i_quant_factor) + avctx->i_quant_offset, 0, 51);
        q->param.mfx.QPP = av_clip(quant, 0, 51);
        q->param.mfx.QPB = av_clip(quant * fabs(avctx->b_quant_factor) + avctx->b_quant_offset, 0, 51);
        break;
    case MFX_RATECONTROL_AVBR:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->param.mfx.Convergence = q->avbr_convergence;
        q->param.mfx.Accuracy    = q->avbr_accuracy;
        break;
    }

    // the HEVC encoder plugin currently fails if coding options
    // are provided
    if (avctx->codec_id != AV_CODEC_ID_HEVC) {
        q->extco.Header.BufferId = MFX_EXTBUFF_CODING_OPTION;
        q->extco.Header.BufferSz = sizeof(q->extco);
        q->extco.CAVLC           = avctx->coder_type == FF_CODER_TYPE_VLC ?
                                   MFX_CODINGOPTION_ON : MFX_CODINGOPTION_UNKNOWN;

        q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco;
    }

    return 0;
}
/* Query the final encoding parameters back from libmfx after Init,
 * record the required output buffer size and export the generated
 * SPS/PPS as codec extradata. */
static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
{
    uint8_t sps_buf[128];
    uint8_t pps_buf[128];

    /* ask libmfx to fill in the parameter sets it generated, alongside
     * the final video parameters */
    mfxExtCodingOptionSPSPPS extradata = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION_SPSPPS,
        .Header.BufferSz = sizeof(extradata),
        .SPSBuffer = sps_buf, .SPSBufSize = sizeof(sps_buf),
        .PPSBuffer = pps_buf, .PPSBufSize = sizeof(pps_buf)
    };

    mfxExtBuffer *ext_buffers[] = {
        (mfxExtBuffer*)&extradata,
    };

    /* MPEG-2 extradata consists of the SPS buffer alone; no PPS is
     * appended for it */
    int need_pps = avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO;
    int ret;

    q->param.ExtParam    = ext_buffers;
    q->param.NumExtParam = FF_ARRAY_ELEMS(ext_buffers);

    ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_error(ret);

    /* size of the bitstream buffer needed for one coded frame */
    q->packet_size = q->param.mfx.BufferSizeInKB * 1000;

    if (!extradata.SPSBufSize || (need_pps && !extradata.PPSBufSize)) {
        av_log(avctx, AV_LOG_ERROR, "No extradata returned from libmfx.\n");
        return AVERROR_UNKNOWN;
    }

    /* extradata layout: SPS immediately followed by PPS (if present) */
    avctx->extradata = av_malloc(extradata.SPSBufSize + need_pps * extradata.PPSBufSize +
                                 AV_INPUT_BUFFER_PADDING_SIZE);
    if (!avctx->extradata)
        return AVERROR(ENOMEM);

    memcpy(avctx->extradata, sps_buf, extradata.SPSBufSize);
    if (need_pps)
        memcpy(avctx->extradata + extradata.SPSBufSize, pps_buf, extradata.PPSBufSize);
    avctx->extradata_size = extradata.SPSBufSize + need_pps * extradata.PPSBufSize;
    memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    return 0;
}
  159. static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q)
  160. {
  161. AVQSVContext *qsv = avctx->hwaccel_context;
  162. mfxFrameSurface1 *surfaces;
  163. int nb_surfaces, i;
  164. nb_surfaces = qsv->nb_opaque_surfaces + q->req.NumFrameSuggested + q->async_depth;
  165. q->opaque_alloc_buf = av_buffer_allocz(sizeof(*surfaces) * nb_surfaces);
  166. if (!q->opaque_alloc_buf)
  167. return AVERROR(ENOMEM);
  168. q->opaque_surfaces = av_malloc_array(nb_surfaces, sizeof(*q->opaque_surfaces));
  169. if (!q->opaque_surfaces)
  170. return AVERROR(ENOMEM);
  171. surfaces = (mfxFrameSurface1*)q->opaque_alloc_buf->data;
  172. for (i = 0; i < nb_surfaces; i++) {
  173. surfaces[i].Info = q->req.Info;
  174. q->opaque_surfaces[i] = surfaces + i;
  175. }
  176. q->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
  177. q->opaque_alloc.Header.BufferSz = sizeof(q->opaque_alloc);
  178. q->opaque_alloc.In.Surfaces = q->opaque_surfaces;
  179. q->opaque_alloc.In.NumSurface = nb_surfaces;
  180. q->opaque_alloc.In.Type = q->req.Type;
  181. q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->opaque_alloc;
  182. qsv->nb_opaque_surfaces = nb_surfaces;
  183. qsv->opaque_surfaces = q->opaque_alloc_buf;
  184. qsv->opaque_alloc_type = q->req.Type;
  185. return 0;
  186. }
/* Initialize the QSV encoder: set up the session (user-provided or
 * internal), build the video parameters, merge ext buffers, initialize
 * libmfx and retrieve the final parameters/extradata.
 * On failure the caller is expected to invoke ff_qsv_enc_close(), which
 * releases everything allocated here. */
int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
{
    int opaque_alloc = 0;
    int ret;

    q->param.IOPattern  = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    q->param.AsyncDepth = q->async_depth;

    /* the fifo carries one (AVPacket, mfxSyncPoint, mfxBitstream*)
     * triplet per frame in flight */
    q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                  (sizeof(AVPacket) + sizeof(mfxSyncPoint) + sizeof(mfxBitstream*)));
    if (!q->async_fifo)
        return AVERROR(ENOMEM);

    /* the caller may supply a session, IO pattern and opaque-alloc
     * request through the AVQSVContext hwaccel context */
    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;

        q->session         = qsv->session;
        q->param.IOPattern = qsv->iopattern;

        opaque_alloc       = qsv->opaque_alloc;
    }

    /* no user session: create an internal one (loading any plugins) */
    if (!q->session) {
        ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                           q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    }

    ret = init_video_param(avctx, q);
    if (ret < 0)
        return ret;

    ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the encoding parameters\n");
        return ff_qsv_error(ret);
    }

    if (opaque_alloc) {
        ret = qsv_init_opaque_alloc(avctx, q);
        if (ret < 0)
            return ret;
    }

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;
        int i, j;

        /* merge user-provided ext buffers with the internal ones; a user
         * buffer with the same BufferId takes precedence over ours */
        q->extparam = av_mallocz_array(qsv->nb_ext_buffers + q->nb_extparam_internal,
                                       sizeof(*q->extparam));
        if (!q->extparam)
            return AVERROR(ENOMEM);

        q->param.ExtParam = q->extparam;
        for (i = 0; i < qsv->nb_ext_buffers; i++)
            q->param.ExtParam[i] = qsv->ext_buffers[i];
        q->param.NumExtParam = qsv->nb_ext_buffers;

        for (i = 0; i < q->nb_extparam_internal; i++) {
            for (j = 0; j < qsv->nb_ext_buffers; j++) {
                if (qsv->ext_buffers[j]->BufferId == q->extparam_internal[i]->BufferId)
                    break;
            }
            /* skip internal buffers overridden by the user */
            if (j < qsv->nb_ext_buffers)
                continue;

            q->param.ExtParam[q->param.NumExtParam++] = q->extparam_internal[i];
        }
    } else {
        q->param.ExtParam    = q->extparam_internal;
        q->param.NumExtParam = q->nb_extparam_internal;
    }

    ret = MFXVideoENCODE_Init(q->session, &q->param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing the encoder\n");
        return ff_qsv_error(ret);
    }

    ret = qsv_retrieve_enc_params(avctx, q);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error retrieving encoding parameters.\n");
        return ret;
    }

    q->avctx = avctx;

    return 0;
}
  260. static void clear_unused_frames(QSVEncContext *q)
  261. {
  262. QSVFrame *cur = q->work_frames;
  263. while (cur) {
  264. if (cur->surface && !cur->surface->Data.Locked) {
  265. cur->surface = NULL;
  266. av_frame_unref(cur->frame);
  267. }
  268. cur = cur->next;
  269. }
  270. }
  271. static int get_free_frame(QSVEncContext *q, QSVFrame **f)
  272. {
  273. QSVFrame *frame, **last;
  274. clear_unused_frames(q);
  275. frame = q->work_frames;
  276. last = &q->work_frames;
  277. while (frame) {
  278. if (!frame->surface) {
  279. *f = frame;
  280. return 0;
  281. }
  282. last = &frame->next;
  283. frame = frame->next;
  284. }
  285. frame = av_mallocz(sizeof(*frame));
  286. if (!frame)
  287. return AVERROR(ENOMEM);
  288. frame->frame = av_frame_alloc();
  289. if (!frame->frame) {
  290. av_freep(&frame);
  291. return AVERROR(ENOMEM);
  292. }
  293. *last = frame;
  294. *f = frame;
  295. return 0;
  296. }
/* Wrap an input AVFrame in an mfxFrameSurface1 suitable for
 * EncodeFrameAsync, copying the data if its padding does not meet
 * libmfx alignment requirements.  The surface is returned in *surface
 * and stays valid until the encoder unlocks it. */
static int submit_frame(QSVEncContext *q, const AVFrame *frame,
                        mfxFrameSurface1 **surface)
{
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(q, &qf);
    if (ret < 0)
        return ret;

    if (frame->format == AV_PIX_FMT_QSV) {
        /* hardware frame: the mfxFrameSurface1 travels in data[3],
         * just take a reference */
        ret = av_frame_ref(qf->frame, frame);
        if (ret < 0)
            return ret;

        qf->surface = (mfxFrameSurface1*)qf->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) {
            /* allocate at the aligned dimensions, then restore the real
             * ones so the copy only touches the visible area */
            qf->frame->height = FFALIGN(frame->height, 32);
            qf->frame->width  = FFALIGN(frame->width, q->width_align);

            ret = ff_get_buffer(q->avctx, qf->frame, AV_GET_BUFFER_FLAG_REF);
            if (ret < 0)
                return ret;

            qf->frame->height = frame->height;
            qf->frame->width  = frame->width;
            ret = av_frame_copy(qf->frame, frame);
            if (ret < 0) {
                av_frame_unref(qf->frame);
                return ret;
            }
        } else {
            ret = av_frame_ref(qf->frame, frame);
            if (ret < 0)
                return ret;
        }

        /* describe the system-memory frame to libmfx */
        qf->surface_internal.Info = q->param.mfx.FrameInfo;

        qf->surface_internal.Info.PicStruct =
            !frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            frame->top_field_first   ? MFX_PICSTRUCT_FIELD_TFF :
                                       MFX_PICSTRUCT_FIELD_BFF;
        if (frame->repeat_pict == 1)
            qf->surface_internal.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
        else if (frame->repeat_pict == 2)
            qf->surface_internal.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
        else if (frame->repeat_pict == 4)
            qf->surface_internal.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

        /* NV12: luma plane in data[0], interleaved chroma in data[1] */
        qf->surface_internal.Data.PitchLow = qf->frame->linesize[0];
        qf->surface_internal.Data.Y        = qf->frame->data[0];
        qf->surface_internal.Data.UV       = qf->frame->data[1];

        qf->surface = &qf->surface_internal;
    }

    /* libmfx timestamps run on a fixed 90 kHz clock */
    qf->surface->Data.TimeStamp = av_rescale_q(frame->pts, q->avctx->time_base, (AVRational){1, 90000});

    *surface = qf->surface;

    return 0;
}
  350. static void print_interlace_msg(AVCodecContext *avctx, QSVEncContext *q)
  351. {
  352. if (q->param.mfx.CodecId == MFX_CODEC_AVC) {
  353. if (q->param.mfx.CodecProfile == MFX_PROFILE_AVC_BASELINE ||
  354. q->param.mfx.CodecLevel < MFX_LEVEL_AVC_21 ||
  355. q->param.mfx.CodecLevel > MFX_LEVEL_AVC_41)
  356. av_log(avctx, AV_LOG_WARNING,
  357. "Interlaced coding is supported"
  358. " at Main/High Profile Level 2.1-4.1\n");
  359. }
  360. }
  361. int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
  362. AVPacket *pkt, const AVFrame *frame, int *got_packet)
  363. {
  364. AVPacket new_pkt = { 0 };
  365. mfxBitstream *bs;
  366. mfxFrameSurface1 *surf = NULL;
  367. mfxSyncPoint sync = NULL;
  368. int ret;
  369. if (frame) {
  370. ret = submit_frame(q, frame, &surf);
  371. if (ret < 0) {
  372. av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n");
  373. return ret;
  374. }
  375. }
  376. ret = av_new_packet(&new_pkt, q->packet_size);
  377. if (ret < 0) {
  378. av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n");
  379. return ret;
  380. }
  381. bs = av_mallocz(sizeof(*bs));
  382. if (!bs) {
  383. av_packet_unref(&new_pkt);
  384. return AVERROR(ENOMEM);
  385. }
  386. bs->Data = new_pkt.data;
  387. bs->MaxLength = new_pkt.size;
  388. do {
  389. ret = MFXVideoENCODE_EncodeFrameAsync(q->session, NULL, surf, bs, &sync);
  390. if (ret == MFX_WRN_DEVICE_BUSY)
  391. av_usleep(1);
  392. } while (ret > 0);
  393. if (ret < 0) {
  394. av_packet_unref(&new_pkt);
  395. av_freep(&bs);
  396. return (ret == MFX_ERR_MORE_DATA) ? 0 : ff_qsv_error(ret);
  397. }
  398. if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame)
  399. print_interlace_msg(avctx, q);
  400. if (sync) {
  401. av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
  402. av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
  403. av_fifo_generic_write(q->async_fifo, &bs, sizeof(bs), NULL);
  404. } else {
  405. av_packet_unref(&new_pkt);
  406. av_freep(&bs);
  407. }
  408. if (!av_fifo_space(q->async_fifo) ||
  409. (!frame && av_fifo_size(q->async_fifo))) {
  410. av_fifo_generic_read(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
  411. av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
  412. av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);
  413. MFXVideoCORE_SyncOperation(q->session, sync, 60000);
  414. new_pkt.dts = av_rescale_q(bs->DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
  415. new_pkt.pts = av_rescale_q(bs->TimeStamp, (AVRational){1, 90000}, avctx->time_base);
  416. new_pkt.size = bs->DataLength;
  417. if (bs->FrameType & MFX_FRAMETYPE_IDR ||
  418. bs->FrameType & MFX_FRAMETYPE_xIDR)
  419. new_pkt.flags |= AV_PKT_FLAG_KEY;
  420. #if FF_API_CODED_FRAME
  421. FF_DISABLE_DEPRECATION_WARNINGS
  422. if (bs->FrameType & MFX_FRAMETYPE_I || bs->FrameType & MFX_FRAMETYPE_xI)
  423. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
  424. else if (bs->FrameType & MFX_FRAMETYPE_P || bs->FrameType & MFX_FRAMETYPE_xP)
  425. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
  426. else if (bs->FrameType & MFX_FRAMETYPE_B || bs->FrameType & MFX_FRAMETYPE_xB)
  427. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
  428. FF_ENABLE_DEPRECATION_WARNINGS
  429. #endif
  430. av_freep(&bs);
  431. if (pkt->data) {
  432. if (pkt->size < new_pkt.size) {
  433. av_log(avctx, AV_LOG_ERROR, "Submitted buffer not large enough: %d < %d\n",
  434. pkt->size, new_pkt.size);
  435. av_packet_unref(&new_pkt);
  436. return AVERROR(EINVAL);
  437. }
  438. memcpy(pkt->data, new_pkt.data, new_pkt.size);
  439. pkt->size = new_pkt.size;
  440. ret = av_packet_copy_props(pkt, &new_pkt);
  441. av_packet_unref(&new_pkt);
  442. if (ret < 0)
  443. return ret;
  444. } else
  445. *pkt = new_pkt;
  446. *got_packet = 1;
  447. }
  448. return 0;
  449. }
  450. int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q)
  451. {
  452. QSVFrame *cur;
  453. if (q->session)
  454. MFXVideoENCODE_Close(q->session);
  455. if (q->internal_session)
  456. MFXClose(q->internal_session);
  457. q->session = NULL;
  458. q->internal_session = NULL;
  459. cur = q->work_frames;
  460. while (cur) {
  461. q->work_frames = cur->next;
  462. av_frame_free(&cur->frame);
  463. av_freep(&cur);
  464. cur = q->work_frames;
  465. }
  466. while (q->async_fifo && av_fifo_size(q->async_fifo)) {
  467. AVPacket pkt;
  468. mfxSyncPoint sync;
  469. mfxBitstream *bs;
  470. av_fifo_generic_read(q->async_fifo, &pkt, sizeof(pkt), NULL);
  471. av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
  472. av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);
  473. av_freep(&bs);
  474. av_packet_unref(&pkt);
  475. }
  476. av_fifo_free(q->async_fifo);
  477. q->async_fifo = NULL;
  478. av_freep(&q->opaque_surfaces);
  479. av_buffer_unref(&q->opaque_alloc_buf);
  480. av_freep(&q->extparam);
  481. return 0;
  482. }