You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1270 lines
43KB

  1. /*
  2. * Intel MediaSDK QSV encoder utility functions
  3. *
  4. * copyright (c) 2013 Yukinori Yamazoe
  5. * copyright (c) 2015 Anton Khirnov
  6. *
  7. * This file is part of FFmpeg.
  8. *
  9. * FFmpeg is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * FFmpeg is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with FFmpeg; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <string.h>
  24. #include <sys/types.h>
  25. #include <mfx/mfxvideo.h>
  26. #include "libavutil/common.h"
  27. #include "libavutil/hwcontext.h"
  28. #include "libavutil/hwcontext_qsv.h"
  29. #include "libavutil/mem.h"
  30. #include "libavutil/log.h"
  31. #include "libavutil/time.h"
  32. #include "libavutil/imgutils.h"
  33. #include "libavcodec/bytestream.h"
  34. #include "avcodec.h"
  35. #include "internal.h"
  36. #include "qsv.h"
  37. #include "qsv_internal.h"
  38. #include "qsvenc.h"
/* Mapping from libmfx codec profile IDs to human-readable names, used only
 * for verbose logging in dump_video_param(). Entries guarded by
 * QSV_VERSION_ATLEAST are only available with sufficiently new SDK headers.
 * NOTE(review): profile IDs of different codecs may share numeric values in
 * the SDK headers; print_profile() returns the first match — verify if a
 * codec-specific lookup is ever needed. */
static const struct {
    mfxU16 profile;
    const char *name;
} profile_names[] = {
    { MFX_PROFILE_AVC_BASELINE,                 "baseline"              },
    { MFX_PROFILE_AVC_MAIN,                     "main"                  },
    { MFX_PROFILE_AVC_EXTENDED,                 "extended"              },
    { MFX_PROFILE_AVC_HIGH,                     "high"                  },
#if QSV_VERSION_ATLEAST(1, 15)
    { MFX_PROFILE_AVC_HIGH_422,                 "high 422"              },
#endif
#if QSV_VERSION_ATLEAST(1, 4)
    { MFX_PROFILE_AVC_CONSTRAINED_BASELINE,     "constrained baseline"  },
    { MFX_PROFILE_AVC_CONSTRAINED_HIGH,         "constrained high"      },
    { MFX_PROFILE_AVC_PROGRESSIVE_HIGH,         "progressive high"      },
#endif
    { MFX_PROFILE_MPEG2_SIMPLE,                 "simple"                },
    { MFX_PROFILE_MPEG2_MAIN,                   "main"                  },
    { MFX_PROFILE_MPEG2_HIGH,                   "high"                  },
    { MFX_PROFILE_VC1_SIMPLE,                   "simple"                },
    { MFX_PROFILE_VC1_MAIN,                     "main"                  },
    { MFX_PROFILE_VC1_ADVANCED,                 "advanced"              },
#if QSV_VERSION_ATLEAST(1, 8)
    { MFX_PROFILE_HEVC_MAIN,                    "main"                  },
    { MFX_PROFILE_HEVC_MAIN10,                  "main10"                },
    { MFX_PROFILE_HEVC_MAINSP,                  "mainsp"                },
#endif
};
  67. static const char *print_profile(mfxU16 profile)
  68. {
  69. int i;
  70. for (i = 0; i < FF_ARRAY_ELEMS(profile_names); i++)
  71. if (profile == profile_names[i].profile)
  72. return profile_names[i].name;
  73. return "unknown";
  74. }
/* Mapping from libmfx rate control method IDs to names for verbose
 * logging via print_ratecontrol(). Conditionally-compiled entries are
 * only present when the SDK headers are new enough to declare them. */
static const struct {
    mfxU16 rc_mode;
    const char *name;
} rc_names[] = {
    { MFX_RATECONTROL_CBR,     "CBR"    },
    { MFX_RATECONTROL_VBR,     "VBR"    },
    { MFX_RATECONTROL_CQP,     "CQP"    },
    { MFX_RATECONTROL_AVBR,    "AVBR"   },
#if QSV_HAVE_LA
    { MFX_RATECONTROL_LA,      "LA"     },
#endif
#if QSV_HAVE_ICQ
    { MFX_RATECONTROL_ICQ,     "ICQ"    },
    { MFX_RATECONTROL_LA_ICQ,  "LA_ICQ" },
#endif
#if QSV_HAVE_VCM
    { MFX_RATECONTROL_VCM,     "VCM"    },
#endif
#if QSV_VERSION_ATLEAST(1, 10)
    { MFX_RATECONTROL_LA_EXT,  "LA_EXT" },
#endif
#if QSV_HAVE_LA_HRD
    { MFX_RATECONTROL_LA_HRD,  "LA_HRD" },
#endif
#if QSV_HAVE_QVBR
    { MFX_RATECONTROL_QVBR,    "QVBR"   },
#endif
};
  103. static const char *print_ratecontrol(mfxU16 rc_mode)
  104. {
  105. int i;
  106. for (i = 0; i < FF_ARRAY_ELEMS(rc_names); i++)
  107. if (rc_mode == rc_names[i].rc_mode)
  108. return rc_names[i].name;
  109. return "unknown";
  110. }
  111. static const char *print_threestate(mfxU16 val)
  112. {
  113. if (val == MFX_CODINGOPTION_ON)
  114. return "ON";
  115. else if (val == MFX_CODINGOPTION_OFF)
  116. return "OFF";
  117. return "unknown";
  118. }
  119. static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
  120. mfxExtBuffer **coding_opts)
  121. {
  122. mfxInfoMFX *info = &q->param.mfx;
  123. mfxExtCodingOption *co = (mfxExtCodingOption*)coding_opts[0];
  124. #if QSV_HAVE_CO2
  125. mfxExtCodingOption2 *co2 = (mfxExtCodingOption2*)coding_opts[1];
  126. #endif
  127. av_log(avctx, AV_LOG_VERBOSE, "profile: %s; level: %"PRIu16"\n",
  128. print_profile(info->CodecProfile), info->CodecLevel);
  129. av_log(avctx, AV_LOG_VERBOSE, "GopPicSize: %"PRIu16"; GopRefDist: %"PRIu16"; GopOptFlag: ",
  130. info->GopPicSize, info->GopRefDist);
  131. if (info->GopOptFlag & MFX_GOP_CLOSED)
  132. av_log(avctx, AV_LOG_VERBOSE, "closed ");
  133. if (info->GopOptFlag & MFX_GOP_STRICT)
  134. av_log(avctx, AV_LOG_VERBOSE, "strict ");
  135. av_log(avctx, AV_LOG_VERBOSE, "; IdrInterval: %"PRIu16"\n", info->IdrInterval);
  136. av_log(avctx, AV_LOG_VERBOSE, "TargetUsage: %"PRIu16"; RateControlMethod: %s\n",
  137. info->TargetUsage, print_ratecontrol(info->RateControlMethod));
  138. if (info->RateControlMethod == MFX_RATECONTROL_CBR ||
  139. info->RateControlMethod == MFX_RATECONTROL_VBR
  140. #if QSV_HAVE_VCM
  141. || info->RateControlMethod == MFX_RATECONTROL_VCM
  142. #endif
  143. ) {
  144. av_log(avctx, AV_LOG_VERBOSE,
  145. "InitialDelayInKB: %"PRIu16"; TargetKbps: %"PRIu16"; MaxKbps: %"PRIu16"\n",
  146. info->InitialDelayInKB, info->TargetKbps, info->MaxKbps);
  147. } else if (info->RateControlMethod == MFX_RATECONTROL_CQP) {
  148. av_log(avctx, AV_LOG_VERBOSE, "QPI: %"PRIu16"; QPP: %"PRIu16"; QPB: %"PRIu16"\n",
  149. info->QPI, info->QPP, info->QPB);
  150. } else if (info->RateControlMethod == MFX_RATECONTROL_AVBR) {
  151. av_log(avctx, AV_LOG_VERBOSE,
  152. "TargetKbps: %"PRIu16"; Accuracy: %"PRIu16"; Convergence: %"PRIu16"\n",
  153. info->TargetKbps, info->Accuracy, info->Convergence);
  154. }
  155. #if QSV_HAVE_LA
  156. else if (info->RateControlMethod == MFX_RATECONTROL_LA
  157. #if QSV_HAVE_LA_HRD
  158. || info->RateControlMethod == MFX_RATECONTROL_LA_HRD
  159. #endif
  160. ) {
  161. av_log(avctx, AV_LOG_VERBOSE,
  162. "TargetKbps: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
  163. info->TargetKbps, co2->LookAheadDepth);
  164. }
  165. #endif
  166. #if QSV_HAVE_ICQ
  167. else if (info->RateControlMethod == MFX_RATECONTROL_ICQ) {
  168. av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"\n", info->ICQQuality);
  169. } else if (info->RateControlMethod == MFX_RATECONTROL_LA_ICQ) {
  170. av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
  171. info->ICQQuality, co2->LookAheadDepth);
  172. }
  173. #endif
  174. av_log(avctx, AV_LOG_VERBOSE, "NumSlice: %"PRIu16"; NumRefFrame: %"PRIu16"\n",
  175. info->NumSlice, info->NumRefFrame);
  176. av_log(avctx, AV_LOG_VERBOSE, "RateDistortionOpt: %s\n",
  177. print_threestate(co->RateDistortionOpt));
  178. #if QSV_HAVE_CO2
  179. av_log(avctx, AV_LOG_VERBOSE,
  180. "RecoveryPointSEI: %s IntRefType: %"PRIu16"; IntRefCycleSize: %"PRIu16"; IntRefQPDelta: %"PRId16"\n",
  181. print_threestate(co->RecoveryPointSEI), co2->IntRefType, co2->IntRefCycleSize, co2->IntRefQPDelta);
  182. av_log(avctx, AV_LOG_VERBOSE, "MaxFrameSize: %"PRIu16"; ", co2->MaxFrameSize);
  183. #if QSV_HAVE_MAX_SLICE_SIZE
  184. av_log(avctx, AV_LOG_VERBOSE, "MaxSliceSize: %"PRIu16"; ", co2->MaxSliceSize);
  185. #endif
  186. av_log(avctx, AV_LOG_VERBOSE, "\n");
  187. av_log(avctx, AV_LOG_VERBOSE,
  188. "BitrateLimit: %s; MBBRC: %s; ExtBRC: %s\n",
  189. print_threestate(co2->BitrateLimit), print_threestate(co2->MBBRC),
  190. print_threestate(co2->ExtBRC));
  191. #if QSV_HAVE_TRELLIS
  192. av_log(avctx, AV_LOG_VERBOSE, "Trellis: ");
  193. if (co2->Trellis & MFX_TRELLIS_OFF) {
  194. av_log(avctx, AV_LOG_VERBOSE, "off");
  195. } else if (!co2->Trellis) {
  196. av_log(avctx, AV_LOG_VERBOSE, "auto");
  197. } else {
  198. if (co2->Trellis & MFX_TRELLIS_I) av_log(avctx, AV_LOG_VERBOSE, "I");
  199. if (co2->Trellis & MFX_TRELLIS_P) av_log(avctx, AV_LOG_VERBOSE, "P");
  200. if (co2->Trellis & MFX_TRELLIS_B) av_log(avctx, AV_LOG_VERBOSE, "B");
  201. }
  202. av_log(avctx, AV_LOG_VERBOSE, "\n");
  203. #endif
  204. #if QSV_VERSION_ATLEAST(1, 8)
  205. av_log(avctx, AV_LOG_VERBOSE,
  206. "RepeatPPS: %s; NumMbPerSlice: %"PRIu16"; LookAheadDS: ",
  207. print_threestate(co2->RepeatPPS), co2->NumMbPerSlice);
  208. switch (co2->LookAheadDS) {
  209. case MFX_LOOKAHEAD_DS_OFF: av_log(avctx, AV_LOG_VERBOSE, "off"); break;
  210. case MFX_LOOKAHEAD_DS_2x: av_log(avctx, AV_LOG_VERBOSE, "2x"); break;
  211. case MFX_LOOKAHEAD_DS_4x: av_log(avctx, AV_LOG_VERBOSE, "4x"); break;
  212. default: av_log(avctx, AV_LOG_VERBOSE, "unknown"); break;
  213. }
  214. av_log(avctx, AV_LOG_VERBOSE, "\n");
  215. av_log(avctx, AV_LOG_VERBOSE, "AdaptiveI: %s; AdaptiveB: %s; BRefType: ",
  216. print_threestate(co2->AdaptiveI), print_threestate(co2->AdaptiveB));
  217. switch (co2->BRefType) {
  218. case MFX_B_REF_OFF: av_log(avctx, AV_LOG_VERBOSE, "off"); break;
  219. case MFX_B_REF_PYRAMID: av_log(avctx, AV_LOG_VERBOSE, "pyramid"); break;
  220. default: av_log(avctx, AV_LOG_VERBOSE, "auto"); break;
  221. }
  222. av_log(avctx, AV_LOG_VERBOSE, "\n");
  223. #endif
  224. #if QSV_VERSION_ATLEAST(1, 9)
  225. av_log(avctx, AV_LOG_VERBOSE,
  226. "MinQPI: %"PRIu8"; MaxQPI: %"PRIu8"; MinQPP: %"PRIu8"; MaxQPP: %"PRIu8"; MinQPB: %"PRIu8"; MaxQPB: %"PRIu8"\n",
  227. co2->MinQPI, co2->MaxQPI, co2->MinQPP, co2->MaxQPP, co2->MinQPB, co2->MaxQPB);
  228. #endif
  229. #endif
  230. if (avctx->codec_id == AV_CODEC_ID_H264) {
  231. av_log(avctx, AV_LOG_VERBOSE, "Entropy coding: %s; MaxDecFrameBuffering: %"PRIu16"\n",
  232. co->CAVLC == MFX_CODINGOPTION_ON ? "CAVLC" : "CABAC", co->MaxDecFrameBuffering);
  233. av_log(avctx, AV_LOG_VERBOSE,
  234. "NalHrdConformance: %s; SingleSeiNalUnit: %s; VuiVclHrdParameters: %s VuiNalHrdParameters: %s\n",
  235. print_threestate(co->NalHrdConformance), print_threestate(co->SingleSeiNalUnit),
  236. print_threestate(co->VuiVclHrdParameters), print_threestate(co->VuiNalHrdParameters));
  237. }
  238. }
/**
 * Pick the libmfx rate control method from the encoder private options
 * and codec context flags, and store it in q->param.mfx.RateControlMethod.
 *
 * Selection priority: CQP (AV_CODEC_FLAG_QSCALE) > VCM > LA / LA_ICQ >
 * ICQ > CBR / AVBR / VBR (chosen from bit_rate vs. rc_max_rate).
 *
 * @return 0 on success, AVERROR(ENOSYS) if a requested mode is not
 *         available in this SDK version, AVERROR(EINVAL) if mutually
 *         exclusive modes were requested together.
 */
static int select_rc_mode(AVCodecContext *avctx, QSVEncContext *q)
{
    const char *rc_desc;
    mfxU16      rc_mode;

    int want_la     = q->look_ahead;
    int want_qscale = !!(avctx->flags & AV_CODEC_FLAG_QSCALE);
    int want_vcm    = q->vcm;

    /* reject modes the SDK headers we were built against do not provide */
    if (want_la && !QSV_HAVE_LA) {
        av_log(avctx, AV_LOG_ERROR,
               "Lookahead ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }
    if (want_vcm && !QSV_HAVE_VCM) {
        av_log(avctx, AV_LOG_ERROR,
               "VCM ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }

    /* the three explicit requests are mutually exclusive */
    if (want_la + want_qscale + want_vcm > 1) {
        av_log(avctx, AV_LOG_ERROR,
               "More than one of: { constant qscale, lookahead, VCM } requested, "
               "only one of them can be used at a time.\n");
        return AVERROR(EINVAL);
    }

    if (want_qscale) {
        rc_mode = MFX_RATECONTROL_CQP;
        rc_desc = "constant quantization parameter (CQP)";
    }
#if QSV_HAVE_VCM
    else if (want_vcm) {
        rc_mode = MFX_RATECONTROL_VCM;
        rc_desc = "video conferencing mode (VCM)";
    }
#endif
#if QSV_HAVE_LA
    else if (want_la) {
        rc_mode = MFX_RATECONTROL_LA;
        rc_desc = "VBR with lookahead (LA)";

#if QSV_HAVE_ICQ
        /* a positive global_quality upgrades plain lookahead to LA_ICQ */
        if (avctx->global_quality > 0) {
            rc_mode = MFX_RATECONTROL_LA_ICQ;
            rc_desc = "intelligent constant quality with lookahead (LA_ICQ)";
        }
#endif
    }
#endif
#if QSV_HAVE_ICQ
    else if (avctx->global_quality > 0) {
        rc_mode = MFX_RATECONTROL_ICQ;
        rc_desc = "intelligent constant quality (ICQ)";
    }
#endif
    /* no explicit request: infer from the bitrate settings */
    else if (avctx->rc_max_rate == avctx->bit_rate) {
        rc_mode = MFX_RATECONTROL_CBR;
        rc_desc = "constant bitrate (CBR)";
    } else if (!avctx->rc_max_rate) {
        rc_mode = MFX_RATECONTROL_AVBR;
        rc_desc = "average variable bitrate (AVBR)";
    } else {
        rc_mode = MFX_RATECONTROL_VBR;
        rc_desc = "variable bitrate (VBR)";
    }

    q->param.mfx.RateControlMethod = rc_mode;
    av_log(avctx, AV_LOG_VERBOSE, "Using the %s ratecontrol method\n", rc_desc);

    return 0;
}
  304. static int rc_supported(QSVEncContext *q)
  305. {
  306. mfxVideoParam param_out = { .mfx.CodecId = q->param.mfx.CodecId };
  307. mfxStatus ret;
  308. ret = MFXVideoENCODE_Query(q->session, &q->param, &param_out);
  309. if (ret < 0 ||
  310. param_out.mfx.RateControlMethod != q->param.mfx.RateControlMethod)
  311. return 0;
  312. return 1;
  313. }
/**
 * Fill q->param (mfxVideoParam) for the MJPEG encoder, whose mfxInfoMFX
 * layout differs from the other codecs (Quality/Interleaved/RestartInterval
 * instead of rate control fields).
 *
 * @return 0 on success, AVERROR_BUG if the codec ID or pixel format
 *         cannot be mapped.
 */
static int init_video_param_jpeg(AVCodecContext *avctx, QSVEncContext *q)
{
    /* for hw frames the real layout is described by sw_pix_fmt */
    enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
                                   avctx->sw_pix_fmt : avctx->pix_fmt;
    const AVPixFmtDescriptor *desc;
    int ret;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;
    q->param.mfx.CodecProfile = q->profile;

    desc = av_pix_fmt_desc_get(sw_format);
    if (!desc)
        return AVERROR_BUG;

    ff_qsv_map_pixfmt(sw_format, &q->param.mfx.FrameInfo.FourCC);

    q->param.mfx.FrameInfo.CropX          = 0;
    q->param.mfx.FrameInfo.CropY          = 0;
    q->param.mfx.FrameInfo.CropW          = avctx->width;
    q->param.mfx.FrameInfo.CropH          = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    q->param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    /* Shift flag marks samples stored in the high bits (>8-bit formats) */
    q->param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;

    q->param.mfx.FrameInfo.Width  = FFALIGN(avctx->width, 16);
    q->param.mfx.FrameInfo.Height = FFALIGN(avctx->height, 16);

    /* with a frames context, the surface dimensions are already fixed */
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        q->param.mfx.FrameInfo.Width  = frames_hwctx->surfaces[0].Info.Width;
        q->param.mfx.FrameInfo.Height = frames_hwctx->surfaces[0].Info.Height;
    }

    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        /* fall back to the inverse of the time base */
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
    }

    q->param.mfx.Interleaved     = 1;
    /* MJPEG quality scale is 1..100 */
    q->param.mfx.Quality         = av_clip(avctx->global_quality, 1, 100);
    q->param.mfx.RestartInterval = 0;

    return 0;
}
/**
 * Fill q->param (mfxVideoParam) and the internal extension buffers from
 * the AVCodecContext and the encoder private options, for all codecs
 * except MJPEG (see init_video_param_jpeg()).
 *
 * @return 0 on success, AVERROR_BUG on unmappable codec/pixel format,
 *         a negative error from select_rc_mode(), or AVERROR(ENOSYS)
 *         when the runtime rejects the chosen rate control mode.
 */
static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
{
    /* for hw frames the real layout is described by sw_pix_fmt */
    enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
                                   avctx->sw_pix_fmt : avctx->pix_fmt;
    const AVPixFmtDescriptor *desc;
    float quant;
    int ret;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;

    q->param.mfx.CodecProfile   = q->profile;
    q->param.mfx.TargetUsage    = q->preset;
    q->param.mfx.GopPicSize     = FFMAX(0, avctx->gop_size);
    /* GopRefDist = distance between consecutive reference frames,
     * i.e. max_b_frames + 1 */
    q->param.mfx.GopRefDist     = FFMAX(-1, avctx->max_b_frames) + 1;
    q->param.mfx.GopOptFlag     = avctx->flags & AV_CODEC_FLAG_CLOSED_GOP ?
                                  MFX_GOP_CLOSED : 0;
    q->param.mfx.IdrInterval    = q->idr_interval;
    q->param.mfx.NumSlice       = avctx->slices;
    q->param.mfx.NumRefFrame    = FFMAX(0, avctx->refs);
    q->param.mfx.EncodedOrder   = 0;
    q->param.mfx.BufferSizeInKB = 0;

    desc = av_pix_fmt_desc_get(sw_format);
    if (!desc)
        return AVERROR_BUG;

    ff_qsv_map_pixfmt(sw_format, &q->param.mfx.FrameInfo.FourCC);

    q->param.mfx.FrameInfo.CropX          = 0;
    q->param.mfx.FrameInfo.CropY          = 0;
    q->param.mfx.FrameInfo.CropW          = avctx->width;
    q->param.mfx.FrameInfo.CropH          = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    q->param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    /* Shift flag marks samples stored in the high bits (>8-bit formats) */
    q->param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;

    // TODO: detect version of MFX--if the minor version is greater than
    // or equal to 19, then can use the same alignment settings as H.264
    // for HEVC
    q->width_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;
    q->param.mfx.FrameInfo.Width = FFALIGN(avctx->width, q->width_align);

    if (avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT) {
        // it is important that PicStruct be setup correctly from the
        // start--otherwise, encoding doesn't work and results in a bunch
        // of incompatible video parameter errors
        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
        // height alignment always must be 32 for interlaced video
        q->height_align = 32;
    } else {
        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        // for progressive video, the height should be aligned to 16 for
        // H.264. For HEVC, depending on the version of MFX, it should be
        // either 32 or 16. The lower number is better if possible.
        q->height_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;
    }
    q->param.mfx.FrameInfo.Height = FFALIGN(avctx->height, q->height_align);

    /* with a frames context, the surface dimensions are already fixed */
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        q->param.mfx.FrameInfo.Width  = frames_hwctx->surfaces[0].Info.Width;
        q->param.mfx.FrameInfo.Height = frames_hwctx->surfaces[0].Info.Height;
    }

    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        /* fall back to the inverse of the time base */
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
    }

    ret = select_rc_mode(avctx, q);
    if (ret < 0)
        return ret;

    /* fill the per-mode rate control parameters */
    switch (q->param.mfx.RateControlMethod) {
    case MFX_RATECONTROL_CBR:
    case MFX_RATECONTROL_VBR:
#if QSV_HAVE_VCM
    case MFX_RATECONTROL_VCM:
#endif
        q->param.mfx.InitialDelayInKB = avctx->rc_initial_buffer_occupancy / 1000;
        q->param.mfx.TargetKbps       = avctx->bit_rate / 1000;
        q->param.mfx.MaxKbps          = avctx->rc_max_rate / 1000;
        break;
    case MFX_RATECONTROL_CQP:
        /* map lambda-scaled global_quality back to a QP value, then derive
         * I/B QPs via the i/b quant factor+offset, clipped to H.264's 0..51 */
        quant = avctx->global_quality / FF_QP2LAMBDA;

        q->param.mfx.QPI = av_clip(quant * fabs(avctx->i_quant_factor) + avctx->i_quant_offset, 0, 51);
        q->param.mfx.QPP = av_clip(quant, 0, 51);
        q->param.mfx.QPB = av_clip(quant * fabs(avctx->b_quant_factor) + avctx->b_quant_offset, 0, 51);
        break;
    case MFX_RATECONTROL_AVBR:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->param.mfx.Convergence = q->avbr_convergence;
        q->param.mfx.Accuracy    = q->avbr_accuracy;
        break;
#if QSV_HAVE_LA
    case MFX_RATECONTROL_LA:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->extco2.LookAheadDepth = q->look_ahead_depth;
        break;
#if QSV_HAVE_ICQ
    case MFX_RATECONTROL_LA_ICQ:
        q->extco2.LookAheadDepth = q->look_ahead_depth;
        /* fall through -- LA_ICQ also needs ICQQuality */
    case MFX_RATECONTROL_ICQ:
        q->param.mfx.ICQQuality = avctx->global_quality;
        break;
#endif
#endif
    }

    // the HEVC encoder plugin currently fails if coding options
    // are provided
    if (avctx->codec_id != AV_CODEC_ID_HEVC) {
        q->extco.Header.BufferId = MFX_EXTBUFF_CODING_OPTION;
        q->extco.Header.BufferSz = sizeof(q->extco);

#if FF_API_CODER_TYPE
FF_DISABLE_DEPRECATION_WARNINGS
        /* honor the deprecated coder_type option when explicitly set */
        if (avctx->coder_type != 0)
            q->cavlc = avctx->coder_type == FF_CODER_TYPE_VLC;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        q->extco.CAVLC = q->cavlc ? MFX_CODINGOPTION_ON
                                  : MFX_CODINGOPTION_UNKNOWN;

        q->extco.PicTimingSEI = q->pic_timing_sei ?
                                MFX_CODINGOPTION_ON : MFX_CODINGOPTION_UNKNOWN;

        if (q->rdo >= 0)
            q->extco.RateDistortionOpt = q->rdo > 0 ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

        if (avctx->codec_id == AV_CODEC_ID_H264) {
            if (avctx->strict_std_compliance != FF_COMPLIANCE_NORMAL)
                q->extco.NalHrdConformance = avctx->strict_std_compliance > FF_COMPLIANCE_NORMAL ?
                                             MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

            if (q->single_sei_nal_unit >= 0)
                q->extco.SingleSeiNalUnit = q->single_sei_nal_unit ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->recovery_point_sei >= 0)
                q->extco.RecoveryPointSEI = q->recovery_point_sei ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            q->extco.MaxDecFrameBuffering = q->max_dec_frame_buffering;
        }

        q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco;

#if QSV_HAVE_CO2
        if (avctx->codec_id == AV_CODEC_ID_H264) {
            q->extco2.Header.BufferId = MFX_EXTBUFF_CODING_OPTION2;
            q->extco2.Header.BufferSz = sizeof(q->extco2);

            /* each option is only set when the user provided a value
             * (sentinel -1 / INT16_MIN = "not set") */
            if (q->int_ref_type >= 0)
                q->extco2.IntRefType = q->int_ref_type;
            if (q->int_ref_cycle_size >= 0)
                q->extco2.IntRefCycleSize = q->int_ref_cycle_size;
            if (q->int_ref_qp_delta != INT16_MIN)
                q->extco2.IntRefQPDelta = q->int_ref_qp_delta;

            if (q->bitrate_limit >= 0)
                q->extco2.BitrateLimit = q->bitrate_limit ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->mbbrc >= 0)
                q->extco2.MBBRC = q->mbbrc ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->extbrc >= 0)
                q->extco2.ExtBRC = q->extbrc ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

            if (q->max_frame_size >= 0)
                q->extco2.MaxFrameSize = q->max_frame_size;
#if QSV_HAVE_MAX_SLICE_SIZE
            if (q->max_slice_size >= 0)
                q->extco2.MaxSliceSize = q->max_slice_size;
#endif

#if QSV_HAVE_TRELLIS
            q->extco2.Trellis = q->trellis;
#endif

#if QSV_HAVE_BREF_TYPE
#if FF_API_PRIVATE_OPT
FF_DISABLE_DEPRECATION_WARNINGS
            /* honor the deprecated b_frame_strategy option */
            if (avctx->b_frame_strategy >= 0)
                q->b_strategy = avctx->b_frame_strategy;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            if (q->b_strategy >= 0)
                q->extco2.BRefType = q->b_strategy ? MFX_B_REF_PYRAMID : MFX_B_REF_OFF;
            if (q->adaptive_i >= 0)
                q->extco2.AdaptiveI = q->adaptive_i ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->adaptive_b >= 0)
                q->extco2.AdaptiveB = q->adaptive_b ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
#endif

            q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco2;

#if QSV_HAVE_LA_DS
            q->extco2.LookAheadDS = q->look_ahead_downsampling;
#endif
        }
#endif
    }

    if (!rc_supported(q)) {
        av_log(avctx, AV_LOG_ERROR,
               "Selected ratecontrol mode is not supported by the QSV "
               "runtime. Choose a different mode.\n");
        return AVERROR(ENOSYS);
    }

    return 0;
}
  552. static int qsv_retrieve_enc_jpeg_params(AVCodecContext *avctx, QSVEncContext *q)
  553. {
  554. int ret = 0;
  555. ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
  556. if (ret < 0)
  557. return ff_qsv_print_error(avctx, ret,
  558. "Error calling GetVideoParam");
  559. q->packet_size = q->param.mfx.BufferSizeInKB * 1000;
  560. // for qsv mjpeg the return value maybe 0 so alloc the buffer
  561. if (q->packet_size == 0)
  562. q->packet_size = q->param.mfx.FrameInfo.Height * q->param.mfx.FrameInfo.Width * 4;
  563. return 0;
  564. }
/**
 * Retrieve the final encoding parameters from libmfx after Init():
 * SPS/PPS extradata, coding options (for the verbose dump), the output
 * packet buffer size, and CPB properties side data.
 *
 * @return 0 on success, AVERROR_UNKNOWN if libmfx returned no extradata,
 *         AVERROR(ENOMEM) on allocation failure, or a libmfx error.
 */
static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
{
    AVCPBProperties *cpb_props;

    uint8_t sps_buf[128];
    uint8_t pps_buf[128];

    mfxExtCodingOptionSPSPPS extradata = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION_SPSPPS,
        .Header.BufferSz = sizeof(extradata),
        .SPSBuffer = sps_buf, .SPSBufSize = sizeof(sps_buf),
        .PPSBuffer = pps_buf, .PPSBufSize = sizeof(pps_buf)
    };

    mfxExtCodingOption co = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION,
        .Header.BufferSz = sizeof(co),
    };
#if QSV_HAVE_CO2
    mfxExtCodingOption2 co2 = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION2,
        .Header.BufferSz = sizeof(co2),
    };
#endif

    mfxExtBuffer *ext_buffers[] = {
        (mfxExtBuffer*)&extradata,
        (mfxExtBuffer*)&co,
#if QSV_HAVE_CO2
        (mfxExtBuffer*)&co2,
#endif
    };

    /* MPEG-2 carries no PPS; all other codecs handled here need one */
    int need_pps = avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO;
    int ret;

    q->param.ExtParam    = ext_buffers;
    q->param.NumExtParam = FF_ARRAY_ELEMS(ext_buffers);

    ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error calling GetVideoParam");

    q->packet_size = q->param.mfx.BufferSizeInKB * 1000;

    if (!extradata.SPSBufSize || (need_pps && !extradata.PPSBufSize)) {
        av_log(avctx, AV_LOG_ERROR, "No extradata returned from libmfx.\n");
        return AVERROR_UNKNOWN;
    }

    /* concatenate SPS (+ PPS) into avctx->extradata, zero-padded */
    avctx->extradata = av_malloc(extradata.SPSBufSize + need_pps * extradata.PPSBufSize +
                                 AV_INPUT_BUFFER_PADDING_SIZE);
    if (!avctx->extradata)
        return AVERROR(ENOMEM);

    memcpy(avctx->extradata, sps_buf, extradata.SPSBufSize);
    if (need_pps)
        memcpy(avctx->extradata + extradata.SPSBufSize, pps_buf, extradata.PPSBufSize);
    avctx->extradata_size = extradata.SPSBufSize + need_pps * extradata.PPSBufSize;
    memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    cpb_props = ff_add_cpb_side_data(avctx);
    if (!cpb_props)
        return AVERROR(ENOMEM);
    cpb_props->max_bitrate = avctx->rc_max_rate;
    cpb_props->min_bitrate = avctx->rc_min_rate;
    cpb_props->avg_bitrate = avctx->bit_rate;
    cpb_props->buffer_size = avctx->rc_buffer_size;

    /* + 1 skips the SPS/PPS buffer: the dump wants only the coding options */
    dump_video_param(avctx, q, ext_buffers + 1);

    return 0;
}
/**
 * Set up opaque surface allocation for the session: allocate the surface
 * pool, fill the mfxExtOpaqueSurfaceAlloc extension buffer, and expose the
 * pool to the caller through the AVQSVContext.
 *
 * NOTE(review): on the av_malloc_array() failure path, opaque_alloc_buf is
 * not unreferenced here — presumably it is released in the encoder close
 * path; verify against ff_qsv_enc_close().
 *
 * @return 0 on success, AVERROR(ENOMEM) on allocation failure.
 */
static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q)
{
    AVQSVContext *qsv = avctx->hwaccel_context;
    mfxFrameSurface1 *surfaces;
    int nb_surfaces, i;

    /* user-requested surfaces plus what the encoder itself needs */
    nb_surfaces = qsv->nb_opaque_surfaces + q->req.NumFrameSuggested + q->async_depth;

    q->opaque_alloc_buf = av_buffer_allocz(sizeof(*surfaces) * nb_surfaces);
    if (!q->opaque_alloc_buf)
        return AVERROR(ENOMEM);

    q->opaque_surfaces = av_malloc_array(nb_surfaces, sizeof(*q->opaque_surfaces));
    if (!q->opaque_surfaces)
        return AVERROR(ENOMEM);

    /* build the pointer array over the zero-initialized surface pool */
    surfaces = (mfxFrameSurface1*)q->opaque_alloc_buf->data;
    for (i = 0; i < nb_surfaces; i++) {
        surfaces[i].Info      = q->req.Info;
        q->opaque_surfaces[i] = surfaces + i;
    }

    q->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
    q->opaque_alloc.Header.BufferSz = sizeof(q->opaque_alloc);
    q->opaque_alloc.In.Surfaces     = q->opaque_surfaces;
    q->opaque_alloc.In.NumSurface   = nb_surfaces;
    q->opaque_alloc.In.Type         = q->req.Type;

    q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->opaque_alloc;

    /* hand the pool back to the API user */
    qsv->nb_opaque_surfaces = nb_surfaces;
    qsv->opaque_surfaces    = q->opaque_alloc_buf;
    qsv->opaque_alloc_type  = q->req.Type;

    return 0;
}
/**
 * Create or adopt the mfxSession used for encoding, trying in order:
 * a user-supplied session (hwaccel_context), a session derived from the
 * frames context, one derived from the device context, and finally an
 * internally created session.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q)
{
    int ret;

    if (avctx->hwaccel_context) {
        /* the user provides the session directly */
        AVQSVContext *qsv = avctx->hwaccel_context;
        q->session = qsv->session;
    } else if (avctx->hw_frames_ctx) {
        q->frames_ctx.hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
        if (!q->frames_ctx.hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
                                         &q->frames_ctx, q->load_plugins,
                                         q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY);
        if (ret < 0) {
            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
            return ret;
        }

        q->session = q->internal_session;
    } else if (avctx->hw_device_ctx) {
        ret = ff_qsv_init_session_device(avctx, &q->internal_session,
                                         avctx->hw_device_ctx, q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    } else {
        /* no hw context given: create a standalone internal session */
        ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                           q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    }

    return 0;
}
/**
 * Initialize the QSV encoder: async FIFO, IO pattern, session, video
 * parameters, extension buffers, and the actual MFXVideoENCODE_Init()
 * call, then retrieve the final parameters from the runtime.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
{
    int iopattern = 0;
    int opaque_alloc = 0;
    int ret;

    q->param.AsyncDepth = q->async_depth;

    /* FIFO of (AVPacket, sync point ptr, bitstream ptr) triplets for
     * frames in flight; one slot more than the async depth */
    q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                  (sizeof(AVPacket) + sizeof(mfxSyncPoint*) + sizeof(mfxBitstream*)));
    if (!q->async_fifo)
        return AVERROR(ENOMEM);

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;

        iopattern    = qsv->iopattern;
        opaque_alloc = qsv->opaque_alloc;
    }

    /* derive the input IO pattern from the frames context when the user
     * did not request one explicitly */
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type &
                     (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
                iopattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
        }
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    q->param.IOPattern = iopattern;

    ret = qsvenc_init_session(avctx, q);
    if (ret < 0)
        return ret;

    // in the mfxInfoMFX struct, JPEG is different from other codecs
    switch (avctx->codec_id) {
    case AV_CODEC_ID_MJPEG:
        ret = init_video_param_jpeg(avctx, q);
        break;
    default:
        ret = init_video_param(avctx, q);
        break;
    }
    if (ret < 0)
        return ret;

    ret = MFXVideoENCODE_Query(q->session, &q->param, &q->param);
    if (ret == MFX_WRN_PARTIAL_ACCELERATION) {
        av_log(avctx, AV_LOG_WARNING, "Encoder will work with partial HW acceleration\n");
    } else if (ret < 0) {
        return ff_qsv_print_error(avctx, ret,
                                  "Error querying encoder params");
    }

    ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error querying (IOSurf) the encoding parameters");

    if (opaque_alloc) {
        ret = qsv_init_opaque_alloc(avctx, q);
        if (ret < 0)
            return ret;
    }

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;
        int i, j;

        /* merge user-supplied extension buffers with the internal ones;
         * a user buffer with the same BufferId overrides the internal one */
        q->extparam = av_mallocz_array(qsv->nb_ext_buffers + q->nb_extparam_internal,
                                       sizeof(*q->extparam));
        if (!q->extparam)
            return AVERROR(ENOMEM);

        q->param.ExtParam = q->extparam;
        for (i = 0; i < qsv->nb_ext_buffers; i++)
            q->param.ExtParam[i] = qsv->ext_buffers[i];
        q->param.NumExtParam = qsv->nb_ext_buffers;

        for (i = 0; i < q->nb_extparam_internal; i++) {
            for (j = 0; j < qsv->nb_ext_buffers; j++) {
                if (qsv->ext_buffers[j]->BufferId == q->extparam_internal[i]->BufferId)
                    break;
            }
            if (j < qsv->nb_ext_buffers)
                continue;

            q->param.ExtParam[q->param.NumExtParam++] = q->extparam_internal[i];
        }
    } else {
        q->param.ExtParam    = q->extparam_internal;
        q->param.NumExtParam = q->nb_extparam_internal;
    }

    ret = MFXVideoENCODE_Init(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the encoder");
    else if (ret > 0)
        ff_qsv_print_warning(avctx, ret,
                             "Warning in encoder initialization");

    /* read back what the runtime actually selected */
    switch (avctx->codec_id) {
    case AV_CODEC_ID_MJPEG:
        ret = qsv_retrieve_enc_jpeg_params(avctx, q);
        break;
    default:
        ret = qsv_retrieve_enc_params(avctx, q);
        break;
    }
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error retrieving encoding parameters.\n");
        return ret;
    }

    q->avctx = avctx;

    return 0;
}
  791. static void free_encoder_ctrl_payloads(mfxEncodeCtrl* enc_ctrl)
  792. {
  793. if (enc_ctrl) {
  794. int i;
  795. for (i = 0; i < enc_ctrl->NumPayload && i < QSV_MAX_ENC_PAYLOAD; i++) {
  796. av_free(enc_ctrl->Payload[i]);
  797. }
  798. enc_ctrl->NumPayload = 0;
  799. }
  800. }
  801. static void clear_unused_frames(QSVEncContext *q)
  802. {
  803. QSVFrame *cur = q->work_frames;
  804. while (cur) {
  805. if (cur->used && !cur->surface.Data.Locked) {
  806. free_encoder_ctrl_payloads(&cur->enc_ctrl);
  807. av_frame_unref(cur->frame);
  808. cur->used = 0;
  809. }
  810. cur = cur->next;
  811. }
  812. }
  813. static int get_free_frame(QSVEncContext *q, QSVFrame **f)
  814. {
  815. QSVFrame *frame, **last;
  816. clear_unused_frames(q);
  817. frame = q->work_frames;
  818. last = &q->work_frames;
  819. while (frame) {
  820. if (!frame->used) {
  821. *f = frame;
  822. frame->used = 1;
  823. return 0;
  824. }
  825. last = &frame->next;
  826. frame = frame->next;
  827. }
  828. frame = av_mallocz(sizeof(*frame));
  829. if (!frame)
  830. return AVERROR(ENOMEM);
  831. frame->frame = av_frame_alloc();
  832. if (!frame->frame) {
  833. av_freep(&frame);
  834. return AVERROR(ENOMEM);
  835. }
  836. frame->enc_ctrl.Payload = av_mallocz(sizeof(mfxPayload*) * QSV_MAX_ENC_PAYLOAD);
  837. if (!frame->enc_ctrl.Payload) {
  838. av_freep(&frame);
  839. return AVERROR(ENOMEM);
  840. }
  841. *last = frame;
  842. *f = frame;
  843. frame->used = 1;
  844. return 0;
  845. }
/**
 * Wrap an input AVFrame into a QSVFrame carrying the mfxFrameSurface1 that
 * will be handed to MFXVideoENCODE_EncodeFrameAsync().
 *
 * For AV_PIX_FMT_QSV input the existing hardware surface is referenced
 * directly; for system-memory input the frame data is wrapped in place, or
 * copied into a freshly aligned buffer when the input does not satisfy the
 * encoder's padding requirements.
 *
 * Returns 0 on success, a negative AVERROR code on failure.
 */
static int submit_frame(QSVEncContext *q, const AVFrame *frame,
                        QSVFrame **new_frame)
{
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(q, &qf);
    if (ret < 0)
        return ret;

    if (frame->format == AV_PIX_FMT_QSV) {
        ret = av_frame_ref(qf->frame, frame);
        if (ret < 0)
            return ret;

        /* for AV_PIX_FMT_QSV the surface descriptor lives in data[3] */
        qf->surface = *(mfxFrameSurface1*)qf->frame->data[3];

        if (q->frames_ctx.mids) {
            /* point MemId at our own mid array entry for this surface */
            ret = ff_qsv_find_surface_idx(&q->frames_ctx, qf);
            if (ret < 0)
                return ret;

            qf->surface.Data.MemId = &q->frames_ctx.mids[ret];
        }
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) {
            /* temporarily bump the dimensions so ff_get_buffer() allocates
             * an aligned buffer, then restore the real visible size */
            qf->frame->height = FFALIGN(frame->height, q->height_align);
            qf->frame->width  = FFALIGN(frame->width, q->width_align);

            ret = ff_get_buffer(q->avctx, qf->frame, AV_GET_BUFFER_FLAG_REF);
            if (ret < 0)
                return ret;

            qf->frame->height = frame->height;
            qf->frame->width  = frame->width;
            ret = av_frame_copy(qf->frame, frame);
            if (ret < 0) {
                av_frame_unref(qf->frame);
                return ret;
            }
        } else {
            /* already padded: just take a reference, no copy */
            ret = av_frame_ref(qf->frame, frame);
            if (ret < 0)
                return ret;
        }

        qf->surface.Info = q->param.mfx.FrameInfo;

        qf->surface.Info.PicStruct =
            !frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            frame->top_field_first   ? MFX_PICSTRUCT_FIELD_TFF :
                                       MFX_PICSTRUCT_FIELD_BFF;
        /* map FFmpeg repeat_pict values to the corresponding mfx flags */
        if (frame->repeat_pict == 1)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
        else if (frame->repeat_pict == 2)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
        else if (frame->repeat_pict == 4)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

        qf->surface.Data.PitchLow  = qf->frame->linesize[0];
        qf->surface.Data.Y         = qf->frame->data[0];
        qf->surface.Data.UV        = qf->frame->data[1];
    }

    /* libmfx timestamps are in a fixed 90kHz time base */
    qf->surface.Data.TimeStamp = av_rescale_q(frame->pts, q->avctx->time_base, (AVRational){1, 90000});

    *new_frame = qf;

    return 0;
}
  904. static void print_interlace_msg(AVCodecContext *avctx, QSVEncContext *q)
  905. {
  906. if (q->param.mfx.CodecId == MFX_CODEC_AVC) {
  907. if (q->param.mfx.CodecProfile == MFX_PROFILE_AVC_BASELINE ||
  908. q->param.mfx.CodecLevel < MFX_LEVEL_AVC_21 ||
  909. q->param.mfx.CodecLevel > MFX_LEVEL_AVC_41)
  910. av_log(avctx, AV_LOG_WARNING,
  911. "Interlaced coding is supported"
  912. " at Main/High Profile Level 2.1-4.1\n");
  913. }
  914. }
  915. static int encode_frame(AVCodecContext *avctx, QSVEncContext *q,
  916. const AVFrame *frame)
  917. {
  918. AVPacket new_pkt = { 0 };
  919. mfxBitstream *bs;
  920. mfxFrameSurface1 *surf = NULL;
  921. mfxSyncPoint *sync = NULL;
  922. QSVFrame *qsv_frame = NULL;
  923. mfxEncodeCtrl* enc_ctrl = NULL;
  924. int ret;
  925. if (frame) {
  926. ret = submit_frame(q, frame, &qsv_frame);
  927. if (ret < 0) {
  928. av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n");
  929. return ret;
  930. }
  931. }
  932. if (qsv_frame) {
  933. surf = &qsv_frame->surface;
  934. enc_ctrl = &qsv_frame->enc_ctrl;
  935. }
  936. ret = av_new_packet(&new_pkt, q->packet_size);
  937. if (ret < 0) {
  938. av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n");
  939. return ret;
  940. }
  941. bs = av_mallocz(sizeof(*bs));
  942. if (!bs) {
  943. av_packet_unref(&new_pkt);
  944. return AVERROR(ENOMEM);
  945. }
  946. bs->Data = new_pkt.data;
  947. bs->MaxLength = new_pkt.size;
  948. if (q->set_encode_ctrl_cb) {
  949. q->set_encode_ctrl_cb(avctx, frame, &qsv_frame->enc_ctrl);
  950. }
  951. sync = av_mallocz(sizeof(*sync));
  952. if (!sync) {
  953. av_freep(&bs);
  954. av_packet_unref(&new_pkt);
  955. return AVERROR(ENOMEM);
  956. }
  957. do {
  958. ret = MFXVideoENCODE_EncodeFrameAsync(q->session, enc_ctrl, surf, bs, sync);
  959. if (ret == MFX_WRN_DEVICE_BUSY)
  960. av_usleep(500);
  961. } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_WRN_IN_EXECUTION);
  962. if (ret > 0)
  963. ff_qsv_print_warning(avctx, ret, "Warning during encoding");
  964. if (ret < 0) {
  965. av_packet_unref(&new_pkt);
  966. av_freep(&bs);
  967. av_freep(&sync);
  968. return (ret == MFX_ERR_MORE_DATA) ?
  969. 0 : ff_qsv_print_error(avctx, ret, "Error during encoding");
  970. }
  971. if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame)
  972. print_interlace_msg(avctx, q);
  973. if (*sync) {
  974. av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
  975. av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
  976. av_fifo_generic_write(q->async_fifo, &bs, sizeof(bs), NULL);
  977. } else {
  978. av_freep(&sync);
  979. av_packet_unref(&new_pkt);
  980. av_freep(&bs);
  981. }
  982. return 0;
  983. }
/**
 * Encode one frame (or flush when frame is NULL) and, once enough async
 * operations are in flight -- or we are draining at EOF -- synchronize the
 * oldest one and return its finished packet.
 *
 * Returns 0 on success (with *got_packet set when a packet was produced),
 * a negative AVERROR code on failure.
 */
int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
                  AVPacket *pkt, const AVFrame *frame, int *got_packet)
{
    int ret;

    ret = encode_frame(avctx, q, frame);
    if (ret < 0)
        return ret;

    /* pop a result only when the async fifo is full (pipeline primed), or
     * when flushing (frame == NULL) and results remain queued */
    if (!av_fifo_space(q->async_fifo) ||
        (!frame && av_fifo_size(q->async_fifo))) {
        AVPacket new_pkt;
        mfxBitstream *bs;
        mfxSyncPoint *sync;

        /* entries were written as (packet, sync, bitstream) triples */
        av_fifo_generic_read(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
        av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
        av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);

        /* wait (1000ms slices) until the async encode operation completes */
        do {
            ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
        } while (ret == MFX_WRN_IN_EXECUTION);

        /* libmfx timestamps are 90kHz; convert back to the codec time base */
        new_pkt.dts  = av_rescale_q(bs->DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
        new_pkt.pts  = av_rescale_q(bs->TimeStamp,       (AVRational){1, 90000}, avctx->time_base);
        new_pkt.size = bs->DataLength;

        if (bs->FrameType & MFX_FRAMETYPE_IDR ||
            bs->FrameType & MFX_FRAMETYPE_xIDR)
            new_pkt.flags |= AV_PKT_FLAG_KEY;

#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        /* deprecated coded_frame pict_type, kept for API compatibility;
         * the x-variants are the second-field frame types */
        if (bs->FrameType & MFX_FRAMETYPE_I || bs->FrameType & MFX_FRAMETYPE_xI)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        else if (bs->FrameType & MFX_FRAMETYPE_P || bs->FrameType & MFX_FRAMETYPE_xP)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        else if (bs->FrameType & MFX_FRAMETYPE_B || bs->FrameType & MFX_FRAMETYPE_xB)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        av_freep(&bs);
        av_freep(&sync);

        /* caller may have supplied its own output buffer in pkt->data */
        if (pkt->data) {
            if (pkt->size < new_pkt.size) {
                av_log(avctx, AV_LOG_ERROR, "Submitted buffer not large enough: %d < %d\n",
                       pkt->size, new_pkt.size);
                av_packet_unref(&new_pkt);
                return AVERROR(EINVAL);
            }

            memcpy(pkt->data, new_pkt.data, new_pkt.size);
            pkt->size = new_pkt.size;

            ret = av_packet_copy_props(pkt, &new_pkt);
            av_packet_unref(&new_pkt);
            if (ret < 0)
                return ret;
        } else
            *pkt = new_pkt;

        *got_packet = 1;
    }

    return 0;
}
  1039. int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q)
  1040. {
  1041. QSVFrame *cur;
  1042. if (q->session)
  1043. MFXVideoENCODE_Close(q->session);
  1044. if (q->internal_session)
  1045. MFXClose(q->internal_session);
  1046. q->session = NULL;
  1047. q->internal_session = NULL;
  1048. av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
  1049. av_buffer_unref(&q->frames_ctx.mids_buf);
  1050. cur = q->work_frames;
  1051. while (cur) {
  1052. q->work_frames = cur->next;
  1053. av_frame_free(&cur->frame);
  1054. av_free(cur->enc_ctrl.Payload);
  1055. av_freep(&cur);
  1056. cur = q->work_frames;
  1057. }
  1058. while (q->async_fifo && av_fifo_size(q->async_fifo)) {
  1059. AVPacket pkt;
  1060. mfxSyncPoint *sync;
  1061. mfxBitstream *bs;
  1062. av_fifo_generic_read(q->async_fifo, &pkt, sizeof(pkt), NULL);
  1063. av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
  1064. av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);
  1065. av_freep(&sync);
  1066. av_freep(&bs);
  1067. av_packet_unref(&pkt);
  1068. }
  1069. av_fifo_free(q->async_fifo);
  1070. q->async_fifo = NULL;
  1071. av_freep(&q->opaque_surfaces);
  1072. av_buffer_unref(&q->opaque_alloc_buf);
  1073. av_freep(&q->extparam);
  1074. return 0;
  1075. }