You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

950 lines
32KB

  1. /*
  2. * Intel MediaSDK QSV encoder utility functions
  3. *
  4. * copyright (c) 2013 Yukinori Yamazoe
  5. * copyright (c) 2015 Anton Khirnov
  6. *
  7. * This file is part of Libav.
  8. *
  9. * Libav is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * Libav is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with Libav; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <string.h>
  24. #include <sys/types.h>
  25. #include <mfx/mfxvideo.h>
  26. #include "libavutil/common.h"
  27. #include "libavutil/mem.h"
  28. #include "libavutil/log.h"
  29. #include "libavutil/time.h"
  30. #include "libavutil/imgutils.h"
  31. #include "avcodec.h"
  32. #include "internal.h"
  33. #include "qsv.h"
  34. #include "qsv_internal.h"
  35. #include "qsvenc.h"
/* Mapping from mfx codec profile ids to printable names.
 * Profile ids are only unique within a single codec, so the same numeric
 * value may appear more than once with different names; print_profile()
 * returns the first match in table order. */
static const struct {
    mfxU16 profile;
    const char *name;
} profile_names[] = {
    { MFX_PROFILE_AVC_BASELINE, "baseline" },
    { MFX_PROFILE_AVC_MAIN, "main" },
    { MFX_PROFILE_AVC_EXTENDED, "extended" },
    { MFX_PROFILE_AVC_HIGH, "high" },
#if QSV_VERSION_ATLEAST(1, 15)
    { MFX_PROFILE_AVC_HIGH_422, "high 422" },
#endif
#if QSV_VERSION_ATLEAST(1, 4)
    { MFX_PROFILE_AVC_CONSTRAINED_BASELINE, "constrained baseline" },
    { MFX_PROFILE_AVC_CONSTRAINED_HIGH, "constrained high" },
    { MFX_PROFILE_AVC_PROGRESSIVE_HIGH, "progressive high" },
#endif
    { MFX_PROFILE_MPEG2_SIMPLE, "simple" },
    { MFX_PROFILE_MPEG2_MAIN, "main" },
    { MFX_PROFILE_MPEG2_HIGH, "high" },
    { MFX_PROFILE_VC1_SIMPLE, "simple" },
    { MFX_PROFILE_VC1_MAIN, "main" },
    { MFX_PROFILE_VC1_ADVANCED, "advanced" },
#if QSV_VERSION_ATLEAST(1, 8)
    { MFX_PROFILE_HEVC_MAIN, "main" },
    { MFX_PROFILE_HEVC_MAIN10, "main10" },
    { MFX_PROFILE_HEVC_MAINSP, "mainsp" },
#endif
};
  64. static const char *print_profile(mfxU16 profile)
  65. {
  66. int i;
  67. for (i = 0; i < FF_ARRAY_ELEMS(profile_names); i++)
  68. if (profile == profile_names[i].profile)
  69. return profile_names[i].name;
  70. return "unknown";
  71. }
/* Mapping from mfx ratecontrol method ids to printable names.
 * Entries guarded by QSV_HAVE_*/QSV_VERSION_ATLEAST are only present
 * when the SDK headers this file is built against support them. */
static const struct {
    mfxU16 rc_mode;
    const char *name;
} rc_names[] = {
    { MFX_RATECONTROL_CBR, "CBR" },
    { MFX_RATECONTROL_VBR, "VBR" },
    { MFX_RATECONTROL_CQP, "CQP" },
    { MFX_RATECONTROL_AVBR, "AVBR" },
#if QSV_HAVE_LA
    { MFX_RATECONTROL_LA, "LA" },
#endif
#if QSV_HAVE_ICQ
    { MFX_RATECONTROL_ICQ, "ICQ" },
    { MFX_RATECONTROL_LA_ICQ, "LA_ICQ" },
#endif
#if QSV_HAVE_VCM
    { MFX_RATECONTROL_VCM, "VCM" },
#endif
#if QSV_VERSION_ATLEAST(1, 10)
    { MFX_RATECONTROL_LA_EXT, "LA_EXT" },
#endif
#if QSV_HAVE_LA_HRD
    { MFX_RATECONTROL_LA_HRD, "LA_HRD" },
#endif
#if QSV_HAVE_QVBR
    { MFX_RATECONTROL_QVBR, "QVBR" },
#endif
};
  100. static const char *print_ratecontrol(mfxU16 rc_mode)
  101. {
  102. int i;
  103. for (i = 0; i < FF_ARRAY_ELEMS(rc_names); i++)
  104. if (rc_mode == rc_names[i].rc_mode)
  105. return rc_names[i].name;
  106. return "unknown";
  107. }
  108. static const char *print_threestate(mfxU16 val)
  109. {
  110. if (val == MFX_CODINGOPTION_ON)
  111. return "ON";
  112. else if (val == MFX_CODINGOPTION_OFF)
  113. return "OFF";
  114. return "unknown";
  115. }
/**
 * Log (at AV_LOG_VERBOSE) the encoding parameters actually in use.
 *
 * @param coding_opts  extension buffers as filled by
 *                     MFXVideoENCODE_GetVideoParam(): [0] is the
 *                     mfxExtCodingOption, [1] (if QSV_HAVE_CO2) the
 *                     mfxExtCodingOption2, [2] (if QSV_HAVE_CO3) the
 *                     mfxExtCodingOption3.
 */
static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
                             mfxExtBuffer **coding_opts)
{
    mfxInfoMFX *info = &q->param.mfx;
    mfxExtCodingOption *co = (mfxExtCodingOption*)coding_opts[0];
#if QSV_HAVE_CO2
    mfxExtCodingOption2 *co2 = (mfxExtCodingOption2*)coding_opts[1];
#endif
#if QSV_HAVE_CO3
    mfxExtCodingOption3 *co3 = (mfxExtCodingOption3*)coding_opts[2];
#endif

    av_log(avctx, AV_LOG_VERBOSE, "profile: %s; level: %"PRIu16"\n",
           print_profile(info->CodecProfile), info->CodecLevel);

    av_log(avctx, AV_LOG_VERBOSE, "GopPicSize: %"PRIu16"; GopRefDist: %"PRIu16"; GopOptFlag: ",
           info->GopPicSize, info->GopRefDist);
    if (info->GopOptFlag & MFX_GOP_CLOSED)
        av_log(avctx, AV_LOG_VERBOSE, "closed ");
    if (info->GopOptFlag & MFX_GOP_STRICT)
        av_log(avctx, AV_LOG_VERBOSE, "strict ");
    av_log(avctx, AV_LOG_VERBOSE, "; IdrInterval: %"PRIu16"\n", info->IdrInterval);

    av_log(avctx, AV_LOG_VERBOSE, "TargetUsage: %"PRIu16"; RateControlMethod: %s\n",
           info->TargetUsage, print_ratecontrol(info->RateControlMethod));

    /* which rate-control fields are meaningful depends on the method */
    if (info->RateControlMethod == MFX_RATECONTROL_CBR ||
        info->RateControlMethod == MFX_RATECONTROL_VBR
#if QSV_HAVE_VCM
        || info->RateControlMethod == MFX_RATECONTROL_VCM
#endif
        ) {
        av_log(avctx, AV_LOG_VERBOSE,
               "InitialDelayInKB: %"PRIu16"; TargetKbps: %"PRIu16"; MaxKbps: %"PRIu16"\n",
               info->InitialDelayInKB, info->TargetKbps, info->MaxKbps);
    } else if (info->RateControlMethod == MFX_RATECONTROL_CQP) {
        av_log(avctx, AV_LOG_VERBOSE, "QPI: %"PRIu16"; QPP: %"PRIu16"; QPB: %"PRIu16"\n",
               info->QPI, info->QPP, info->QPB);
    } else if (info->RateControlMethod == MFX_RATECONTROL_AVBR) {
        av_log(avctx, AV_LOG_VERBOSE,
               "TargetKbps: %"PRIu16"; Accuracy: %"PRIu16"; Convergence: %"PRIu16"\n",
               info->TargetKbps, info->Accuracy, info->Convergence);
    }
#if QSV_HAVE_LA
    else if (info->RateControlMethod == MFX_RATECONTROL_LA
#if QSV_HAVE_LA_HRD
             || info->RateControlMethod == MFX_RATECONTROL_LA_HRD
#endif
             ) {
        av_log(avctx, AV_LOG_VERBOSE,
               "TargetKbps: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
               info->TargetKbps, co2->LookAheadDepth);
    }
#endif
#if QSV_HAVE_ICQ
    else if (info->RateControlMethod == MFX_RATECONTROL_ICQ) {
        av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"\n", info->ICQQuality);
    } else if (info->RateControlMethod == MFX_RATECONTROL_LA_ICQ) {
        av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
               info->ICQQuality, co2->LookAheadDepth);
    }
#endif
#if QSV_HAVE_QVBR
    else if (info->RateControlMethod == MFX_RATECONTROL_QVBR) {
        av_log(avctx, AV_LOG_VERBOSE, "QVBRQuality: %"PRIu16"\n",
               co3->QVBRQuality);
    }
#endif

    av_log(avctx, AV_LOG_VERBOSE, "NumSlice: %"PRIu16"; NumRefFrame: %"PRIu16"\n",
           info->NumSlice, info->NumRefFrame);
    av_log(avctx, AV_LOG_VERBOSE, "RateDistortionOpt: %s\n",
           print_threestate(co->RateDistortionOpt));

#if QSV_HAVE_CO2
    av_log(avctx, AV_LOG_VERBOSE,
           "RecoveryPointSEI: %s IntRefType: %"PRIu16"; IntRefCycleSize: %"PRIu16"; IntRefQPDelta: %"PRId16"\n",
           print_threestate(co->RecoveryPointSEI), co2->IntRefType, co2->IntRefCycleSize, co2->IntRefQPDelta);

    av_log(avctx, AV_LOG_VERBOSE, "MaxFrameSize: %"PRIu16"; ", co2->MaxFrameSize);
#if QSV_VERSION_ATLEAST(1, 9)
    av_log(avctx, AV_LOG_VERBOSE, "MaxSliceSize: %"PRIu16"; ", co2->MaxSliceSize);
#endif
    av_log(avctx, AV_LOG_VERBOSE, "\n");

    av_log(avctx, AV_LOG_VERBOSE,
           "BitrateLimit: %s; MBBRC: %s; ExtBRC: %s\n",
           print_threestate(co2->BitrateLimit), print_threestate(co2->MBBRC),
           print_threestate(co2->ExtBRC));

#if QSV_HAVE_TRELLIS
    /* Trellis is a bitmask: OFF wins, 0 means auto, otherwise list the
     * frame types it is enabled for */
    av_log(avctx, AV_LOG_VERBOSE, "Trellis: ");
    if (co2->Trellis & MFX_TRELLIS_OFF) {
        av_log(avctx, AV_LOG_VERBOSE, "off");
    } else if (!co2->Trellis) {
        av_log(avctx, AV_LOG_VERBOSE, "auto");
    } else {
        if (co2->Trellis & MFX_TRELLIS_I) av_log(avctx, AV_LOG_VERBOSE, "I");
        if (co2->Trellis & MFX_TRELLIS_P) av_log(avctx, AV_LOG_VERBOSE, "P");
        if (co2->Trellis & MFX_TRELLIS_B) av_log(avctx, AV_LOG_VERBOSE, "B");
    }
    av_log(avctx, AV_LOG_VERBOSE, "\n");
#endif

#if QSV_VERSION_ATLEAST(1, 8)
    av_log(avctx, AV_LOG_VERBOSE,
           "RepeatPPS: %s; NumMbPerSlice: %"PRIu16"; LookAheadDS: ",
           print_threestate(co2->RepeatPPS), co2->NumMbPerSlice);
    switch (co2->LookAheadDS) {
    case MFX_LOOKAHEAD_DS_OFF: av_log(avctx, AV_LOG_VERBOSE, "off"); break;
    case MFX_LOOKAHEAD_DS_2x:  av_log(avctx, AV_LOG_VERBOSE, "2x"); break;
    case MFX_LOOKAHEAD_DS_4x:  av_log(avctx, AV_LOG_VERBOSE, "4x"); break;
    default:                   av_log(avctx, AV_LOG_VERBOSE, "unknown"); break;
    }
    av_log(avctx, AV_LOG_VERBOSE, "\n");

    av_log(avctx, AV_LOG_VERBOSE, "AdaptiveI: %s; AdaptiveB: %s; BRefType: ",
           print_threestate(co2->AdaptiveI), print_threestate(co2->AdaptiveB));
    switch (co2->BRefType) {
    case MFX_B_REF_OFF:     av_log(avctx, AV_LOG_VERBOSE, "off");     break;
    case MFX_B_REF_PYRAMID: av_log(avctx, AV_LOG_VERBOSE, "pyramid"); break;
    default:                av_log(avctx, AV_LOG_VERBOSE, "auto");    break;
    }
    av_log(avctx, AV_LOG_VERBOSE, "\n");
#endif

#if QSV_VERSION_ATLEAST(1, 9)
    av_log(avctx, AV_LOG_VERBOSE,
           "MinQPI: %"PRIu8"; MaxQPI: %"PRIu8"; MinQPP: %"PRIu8"; MaxQPP: %"PRIu8"; MinQPB: %"PRIu8"; MaxQPB: %"PRIu8"\n",
           co2->MinQPI, co2->MaxQPI, co2->MinQPP, co2->MaxQPP, co2->MinQPB, co2->MaxQPB);
#endif
#endif

    if (avctx->codec_id == AV_CODEC_ID_H264) {
        av_log(avctx, AV_LOG_VERBOSE, "Entropy coding: %s; MaxDecFrameBuffering: %"PRIu16"\n",
               co->CAVLC == MFX_CODINGOPTION_ON ? "CAVLC" : "CABAC", co->MaxDecFrameBuffering);
        av_log(avctx, AV_LOG_VERBOSE,
               "NalHrdConformance: %s; SingleSeiNalUnit: %s; VuiVclHrdParameters: %s VuiNalHrdParameters: %s\n",
               print_threestate(co->NalHrdConformance), print_threestate(co->SingleSeiNalUnit),
               print_threestate(co->VuiVclHrdParameters), print_threestate(co->VuiNalHrdParameters));
    }
}
/**
 * Choose the mfx ratecontrol method from the user-supplied options.
 *
 * Selection priority: constant qscale (CQP) > VCM > lookahead
 * (LA, upgraded to LA_ICQ when global_quality > 0) > ICQ > bitrate-based
 * modes (CBR when max_rate == bit_rate, AVBR when no max_rate, else VBR).
 * Conflicting requests, or modes this build's SDK headers lack, are
 * rejected.
 *
 * @return 0 on success, AVERROR(ENOSYS) or AVERROR(EINVAL) on bad options.
 */
static int select_rc_mode(AVCodecContext *avctx, QSVEncContext *q)
{
    const char *rc_desc;
    mfxU16 rc_mode;

    /* la_depth >= 0 means lookahead was explicitly requested */
    int want_la = q->la_depth >= 0;
    int want_qscale = !!(avctx->flags & AV_CODEC_FLAG_QSCALE);
    int want_vcm = q->vcm;

    if (want_la && !QSV_HAVE_LA) {
        av_log(avctx, AV_LOG_ERROR,
               "Lookahead ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }
    if (want_vcm && !QSV_HAVE_VCM) {
        av_log(avctx, AV_LOG_ERROR,
               "VCM ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }

    /* the three explicit requests are mutually exclusive */
    if (want_la + want_qscale + want_vcm > 1) {
        av_log(avctx, AV_LOG_ERROR,
               "More than one of: { constant qscale, lookahead, VCM } requested, "
               "only one of them can be used at a time.\n");
        return AVERROR(EINVAL);
    }

    if (want_qscale) {
        rc_mode = MFX_RATECONTROL_CQP;
        rc_desc = "constant quantization parameter (CQP)";
    }
#if QSV_HAVE_VCM
    else if (want_vcm) {
        rc_mode = MFX_RATECONTROL_VCM;
        rc_desc = "video conferencing mode (VCM)";
    }
#endif
#if QSV_HAVE_LA
    else if (want_la) {
        rc_mode = MFX_RATECONTROL_LA;
        rc_desc = "VBR with lookahead (LA)";

#if QSV_HAVE_ICQ
        /* a positive global_quality upgrades lookahead to LA_ICQ */
        if (avctx->global_quality > 0) {
            rc_mode = MFX_RATECONTROL_LA_ICQ;
            rc_desc = "intelligent constant quality with lookahead (LA_ICQ)";
        }
#endif
    }
#endif
#if QSV_HAVE_ICQ
    else if (avctx->global_quality > 0) {
        rc_mode = MFX_RATECONTROL_ICQ;
        rc_desc = "intelligent constant quality (ICQ)";
    }
#endif
    else if (avctx->rc_max_rate == avctx->bit_rate) {
        rc_mode = MFX_RATECONTROL_CBR;
        rc_desc = "constant bitrate (CBR)";
    } else if (!avctx->rc_max_rate) {
        rc_mode = MFX_RATECONTROL_AVBR;
        rc_desc = "average variable bitrate (AVBR)";
    } else {
        rc_mode = MFX_RATECONTROL_VBR;
        rc_desc = "variable bitrate (VBR)";
    }

    q->param.mfx.RateControlMethod = rc_mode;
    av_log(avctx, AV_LOG_VERBOSE, "Using the %s ratecontrol method\n", rc_desc);

    return 0;
}
  310. static int rc_supported(QSVEncContext *q)
  311. {
  312. mfxVideoParam param_out = { .mfx.CodecId = q->param.mfx.CodecId };
  313. mfxStatus ret;
  314. ret = MFXVideoENCODE_Query(q->session, &q->param, &param_out);
  315. if (ret < 0 ||
  316. param_out.mfx.RateControlMethod != q->param.mfx.RateControlMethod)
  317. return 0;
  318. return 1;
  319. }
/**
 * Fill q->param from the AVCodecContext and the encoder's private
 * options, select the ratecontrol method and verify the runtime
 * supports it.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
{
    float quant;
    int ret;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    /* HEVC needs 32-aligned luma strides, the other codecs 16 */
    q->width_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;

    q->param.mfx.CodecProfile = q->profile;
    q->param.mfx.TargetUsage = q->preset;
    q->param.mfx.GopPicSize = FFMAX(0, avctx->gop_size);
    /* mfx GopRefDist is the I/P distance: max_b_frames + 1 */
    q->param.mfx.GopRefDist = FFMAX(-1, avctx->max_b_frames) + 1;
    q->param.mfx.GopOptFlag = avctx->flags & AV_CODEC_FLAG_CLOSED_GOP ?
                              MFX_GOP_CLOSED : 0;
    q->param.mfx.IdrInterval = q->idr_interval;
    q->param.mfx.NumSlice = avctx->slices;
    q->param.mfx.NumRefFrame = FFMAX(0, avctx->refs);
    q->param.mfx.EncodedOrder = 0;
    /* 0 lets the runtime pick the bitstream buffer size */
    q->param.mfx.BufferSizeInKB = 0;

    q->param.mfx.FrameInfo.FourCC = MFX_FOURCC_NV12;
    /* coded dimensions are aligned; the crop fields carry the real size */
    q->param.mfx.FrameInfo.Width = FFALIGN(avctx->width, q->width_align);
    q->param.mfx.FrameInfo.Height = FFALIGN(avctx->height, 32);
    q->param.mfx.FrameInfo.CropX = 0;
    q->param.mfx.FrameInfo.CropY = 0;
    q->param.mfx.FrameInfo.CropW = avctx->width;
    q->param.mfx.FrameInfo.CropH = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
    q->param.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma = 8;
    q->param.mfx.FrameInfo.BitDepthChroma = 8;

    /* prefer the explicit framerate; fall back to the inverse time base */
    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
    }

    ret = select_rc_mode(avctx, q);
    if (ret < 0)
        return ret;

    switch (q->param.mfx.RateControlMethod) {
    case MFX_RATECONTROL_CBR:
    case MFX_RATECONTROL_VBR:
#if QSV_HAVE_VCM
    case MFX_RATECONTROL_VCM:
#endif
        q->param.mfx.InitialDelayInKB = avctx->rc_initial_buffer_occupancy / 1000;
        q->param.mfx.TargetKbps = avctx->bit_rate / 1000;
        q->param.mfx.MaxKbps = avctx->rc_max_rate / 1000;
        break;
    case MFX_RATECONTROL_CQP:
        /* NOTE(review): integer division -- any fractional part of
         * global_quality / FF_QP2LAMBDA is discarded before the float
         * assignment; confirm this truncation is intended */
        quant = avctx->global_quality / FF_QP2LAMBDA;
        q->param.mfx.QPI = av_clip(quant * fabs(avctx->i_quant_factor) + avctx->i_quant_offset, 0, 51);
        q->param.mfx.QPP = av_clip(quant, 0, 51);
        q->param.mfx.QPB = av_clip(quant * fabs(avctx->b_quant_factor) + avctx->b_quant_offset, 0, 51);
        break;
    case MFX_RATECONTROL_AVBR:
        q->param.mfx.TargetKbps = avctx->bit_rate / 1000;
        q->param.mfx.Convergence = q->avbr_convergence;
        q->param.mfx.Accuracy = q->avbr_accuracy;
        break;
#if QSV_HAVE_LA
    case MFX_RATECONTROL_LA:
        q->param.mfx.TargetKbps = avctx->bit_rate / 1000;
        q->extco2.LookAheadDepth = q->la_depth;
        break;
#if QSV_HAVE_ICQ
    case MFX_RATECONTROL_LA_ICQ:
        q->extco2.LookAheadDepth = q->la_depth;
        /* fall through -- LA_ICQ also needs ICQQuality */
    case MFX_RATECONTROL_ICQ:
        q->param.mfx.ICQQuality = avctx->global_quality;
        break;
#endif
#endif
    }

    // the HEVC encoder plugin currently fails if coding options
    // are provided
    if (avctx->codec_id != AV_CODEC_ID_HEVC) {
        q->extco.Header.BufferId = MFX_EXTBUFF_CODING_OPTION;
        q->extco.Header.BufferSz = sizeof(q->extco);
        q->extco.CAVLC = avctx->coder_type == FF_CODER_TYPE_VLC ?
                         MFX_CODINGOPTION_ON : MFX_CODINGOPTION_UNKNOWN;

        q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco;

#if QSV_HAVE_CO2
        if (avctx->codec_id == AV_CODEC_ID_H264) {
            q->extco2.Header.BufferId = MFX_EXTBUFF_CODING_OPTION2;
            q->extco2.Header.BufferSz = sizeof(q->extco2);

            q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco2;
        }
#endif
    }

    if (!rc_supported(q)) {
        av_log(avctx, AV_LOG_ERROR,
               "Selected ratecontrol mode is not supported by the QSV "
               "runtime. Choose a different mode.\n");
        return AVERROR(ENOSYS);
    }

    return 0;
}
/**
 * Query the parameters libmfx actually uses, store the generated
 * SPS/PPS in avctx->extradata, record the output buffer size and log
 * the final configuration.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
{
    uint8_t sps_buf[128];
    uint8_t pps_buf[128];

    mfxExtCodingOptionSPSPPS extradata = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION_SPSPPS,
        .Header.BufferSz = sizeof(extradata),
        .SPSBuffer = sps_buf, .SPSBufSize = sizeof(sps_buf),
        .PPSBuffer = pps_buf, .PPSBufSize = sizeof(pps_buf)
    };

    mfxExtCodingOption co = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION,
        .Header.BufferSz = sizeof(co),
    };
#if QSV_HAVE_CO2
    mfxExtCodingOption2 co2 = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION2,
        .Header.BufferSz = sizeof(co2),
    };
#endif
#if QSV_HAVE_CO3
    mfxExtCodingOption3 co3 = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION3,
        .Header.BufferSz = sizeof(co3),
    };
#endif

    mfxExtBuffer *ext_buffers[] = {
        (mfxExtBuffer*)&extradata,
        (mfxExtBuffer*)&co,
#if QSV_HAVE_CO2
        (mfxExtBuffer*)&co2,
#endif
#if QSV_HAVE_CO3
        (mfxExtBuffer*)&co3,
#endif
    };

    /* MPEG-2 has no PPS, so only the sequence header is required */
    int need_pps = avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO;
    int ret;

    q->param.ExtParam = ext_buffers;
    q->param.NumExtParam = FF_ARRAY_ELEMS(ext_buffers);

    ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_error(ret);

    q->packet_size = q->param.mfx.BufferSizeInKB * 1000;

    if (!extradata.SPSBufSize || (need_pps && !extradata.PPSBufSize)) {
        av_log(avctx, AV_LOG_ERROR, "No extradata returned from libmfx.\n");
        return AVERROR_UNKNOWN;
    }

    avctx->extradata = av_malloc(extradata.SPSBufSize + need_pps * extradata.PPSBufSize +
                                 AV_INPUT_BUFFER_PADDING_SIZE);
    if (!avctx->extradata)
        return AVERROR(ENOMEM);

    /* extradata layout: SPS followed (when present) by PPS, zero padded */
    memcpy(avctx->extradata, sps_buf, extradata.SPSBufSize);
    if (need_pps)
        memcpy(avctx->extradata + extradata.SPSBufSize, pps_buf, extradata.PPSBufSize);
    avctx->extradata_size = extradata.SPSBufSize + need_pps * extradata.PPSBufSize;
    memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    /* +1 skips the SPSPPS buffer; dump only the coding options */
    dump_video_param(avctx, q, ext_buffers + 1);

    return 0;
}
  484. static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q)
  485. {
  486. AVQSVContext *qsv = avctx->hwaccel_context;
  487. mfxFrameSurface1 *surfaces;
  488. int nb_surfaces, i;
  489. nb_surfaces = qsv->nb_opaque_surfaces + q->req.NumFrameSuggested + q->async_depth;
  490. q->opaque_alloc_buf = av_buffer_allocz(sizeof(*surfaces) * nb_surfaces);
  491. if (!q->opaque_alloc_buf)
  492. return AVERROR(ENOMEM);
  493. q->opaque_surfaces = av_malloc_array(nb_surfaces, sizeof(*q->opaque_surfaces));
  494. if (!q->opaque_surfaces)
  495. return AVERROR(ENOMEM);
  496. surfaces = (mfxFrameSurface1*)q->opaque_alloc_buf->data;
  497. for (i = 0; i < nb_surfaces; i++) {
  498. surfaces[i].Info = q->req.Info;
  499. q->opaque_surfaces[i] = surfaces + i;
  500. }
  501. q->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
  502. q->opaque_alloc.Header.BufferSz = sizeof(q->opaque_alloc);
  503. q->opaque_alloc.In.Surfaces = q->opaque_surfaces;
  504. q->opaque_alloc.In.NumSurface = nb_surfaces;
  505. q->opaque_alloc.In.Type = q->req.Type;
  506. q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->opaque_alloc;
  507. qsv->nb_opaque_surfaces = nb_surfaces;
  508. qsv->opaque_surfaces = q->opaque_alloc_buf;
  509. qsv->opaque_alloc_type = q->req.Type;
  510. return 0;
  511. }
/**
 * Initialize the QSV encoder: adopt or create an MFX session, build the
 * encoding parameters, allocate the async fifo, merge user and internal
 * extension buffers and initialize the libmfx encoder.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
{
    int opaque_alloc = 0;
    int ret;

    q->param.IOPattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    q->param.AsyncDepth = q->async_depth;

    /* each fifo slot holds the packet, sync point and bitstream pointer
     * of one in-flight encode */
    q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                  (sizeof(AVPacket) + sizeof(mfxSyncPoint) + sizeof(mfxBitstream*)));
    if (!q->async_fifo)
        return AVERROR(ENOMEM);

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;

        q->session = qsv->session;
        q->param.IOPattern = qsv->iopattern;

        opaque_alloc = qsv->opaque_alloc;
    }

    /* no user-provided session: create our own */
    if (!q->session) {
        ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                           q->load_plugins);
        if (ret < 0)
            return ret;

        q->session = q->internal_session;
    }

    ret = init_video_param(avctx, q);
    if (ret < 0)
        return ret;

    ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the encoding parameters\n");
        return ff_qsv_error(ret);
    }

    if (opaque_alloc) {
        ret = qsv_init_opaque_alloc(avctx, q);
        if (ret < 0)
            return ret;
    }

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;
        int i, j;

        q->extparam = av_mallocz_array(qsv->nb_ext_buffers + q->nb_extparam_internal,
                                       sizeof(*q->extparam));
        if (!q->extparam)
            return AVERROR(ENOMEM);

        q->param.ExtParam = q->extparam;
        for (i = 0; i < qsv->nb_ext_buffers; i++)
            q->param.ExtParam[i] = qsv->ext_buffers[i];
        q->param.NumExtParam = qsv->nb_ext_buffers;

        /* append internal buffers, but let a user buffer with the same
         * BufferId take precedence */
        for (i = 0; i < q->nb_extparam_internal; i++) {
            for (j = 0; j < qsv->nb_ext_buffers; j++) {
                if (qsv->ext_buffers[j]->BufferId == q->extparam_internal[i]->BufferId)
                    break;
            }
            if (j < qsv->nb_ext_buffers)
                continue;

            q->param.ExtParam[q->param.NumExtParam++] = q->extparam_internal[i];
        }
    } else {
        q->param.ExtParam = q->extparam_internal;
        q->param.NumExtParam = q->nb_extparam_internal;
    }

    ret = MFXVideoENCODE_Init(q->session, &q->param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing the encoder\n");
        return ff_qsv_error(ret);
    }

    ret = qsv_retrieve_enc_params(avctx, q);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error retrieving encoding parameters.\n");
        return ret;
    }

    q->avctx = avctx;

    return 0;
}
  585. static void clear_unused_frames(QSVEncContext *q)
  586. {
  587. QSVFrame *cur = q->work_frames;
  588. while (cur) {
  589. if (cur->surface && !cur->surface->Data.Locked) {
  590. cur->surface = NULL;
  591. av_frame_unref(cur->frame);
  592. }
  593. cur = cur->next;
  594. }
  595. }
  596. static int get_free_frame(QSVEncContext *q, QSVFrame **f)
  597. {
  598. QSVFrame *frame, **last;
  599. clear_unused_frames(q);
  600. frame = q->work_frames;
  601. last = &q->work_frames;
  602. while (frame) {
  603. if (!frame->surface) {
  604. *f = frame;
  605. return 0;
  606. }
  607. last = &frame->next;
  608. frame = frame->next;
  609. }
  610. frame = av_mallocz(sizeof(*frame));
  611. if (!frame)
  612. return AVERROR(ENOMEM);
  613. frame->frame = av_frame_alloc();
  614. if (!frame->frame) {
  615. av_freep(&frame);
  616. return AVERROR(ENOMEM);
  617. }
  618. *last = frame;
  619. *f = frame;
  620. return 0;
  621. }
/**
 * Wrap an input AVFrame in an mfxFrameSurface1 for encoding.
 *
 * AV_PIX_FMT_QSV frames already carry their surface in data[3]; other
 * frames are referenced, or copied into an aligned buffer when their
 * dimensions/strides do not satisfy libmfx requirements, and a surface
 * is constructed around the data.
 *
 * @param surface  on success, set to the surface to pass to libmfx
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int submit_frame(QSVEncContext *q, const AVFrame *frame,
                        mfxFrameSurface1 **surface)
{
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(q, &qf);
    if (ret < 0)
        return ret;

    if (frame->format == AV_PIX_FMT_QSV) {
        ret = av_frame_ref(qf->frame, frame);
        if (ret < 0)
            return ret;

        /* hardware frames carry their mfx surface in data[3] */
        qf->surface = (mfxFrameSurface1*)qf->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires:
         * height a multiple of 32 and luma stride a multiple of the
         * codec-specific width alignment */
        if (frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) {
            qf->frame->height = FFALIGN(frame->height, 32);
            qf->frame->width = FFALIGN(frame->width, q->width_align);

            ret = ff_get_buffer(q->avctx, qf->frame, AV_GET_BUFFER_FLAG_REF);
            if (ret < 0)
                return ret;

            /* restore the nominal dimensions before copying */
            qf->frame->height = frame->height;
            qf->frame->width = frame->width;
            ret = av_frame_copy(qf->frame, frame);
            if (ret < 0) {
                av_frame_unref(qf->frame);
                return ret;
            }
        } else {
            ret = av_frame_ref(qf->frame, frame);
            if (ret < 0)
                return ret;
        }

        qf->surface_internal.Info = q->param.mfx.FrameInfo;

        qf->surface_internal.Info.PicStruct =
            !frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            frame->top_field_first   ? MFX_PICSTRUCT_FIELD_TFF :
                                       MFX_PICSTRUCT_FIELD_BFF;
        if (frame->repeat_pict == 1)
            qf->surface_internal.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
        else if (frame->repeat_pict == 2)
            qf->surface_internal.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
        else if (frame->repeat_pict == 4)
            qf->surface_internal.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

        /* NV12: plane 0 is luma, plane 1 interleaved chroma */
        qf->surface_internal.Data.PitchLow = qf->frame->linesize[0];
        qf->surface_internal.Data.Y = qf->frame->data[0];
        qf->surface_internal.Data.UV = qf->frame->data[1];

        qf->surface = &qf->surface_internal;
    }

    /* libmfx timestamps are in a 90 kHz timebase */
    qf->surface->Data.TimeStamp = av_rescale_q(frame->pts, q->avctx->time_base, (AVRational){1, 90000});

    *surface = qf->surface;

    return 0;
}
  675. static void print_interlace_msg(AVCodecContext *avctx, QSVEncContext *q)
  676. {
  677. if (q->param.mfx.CodecId == MFX_CODEC_AVC) {
  678. if (q->param.mfx.CodecProfile == MFX_PROFILE_AVC_BASELINE ||
  679. q->param.mfx.CodecLevel < MFX_LEVEL_AVC_21 ||
  680. q->param.mfx.CodecLevel > MFX_LEVEL_AVC_41)
  681. av_log(avctx, AV_LOG_WARNING,
  682. "Interlaced coding is supported"
  683. " at Main/High Profile Level 2.1-4.1\n");
  684. }
  685. }
  686. int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
  687. AVPacket *pkt, const AVFrame *frame, int *got_packet)
  688. {
  689. AVPacket new_pkt = { 0 };
  690. mfxBitstream *bs;
  691. mfxFrameSurface1 *surf = NULL;
  692. mfxSyncPoint sync = NULL;
  693. int ret;
  694. if (frame) {
  695. ret = submit_frame(q, frame, &surf);
  696. if (ret < 0) {
  697. av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n");
  698. return ret;
  699. }
  700. }
  701. ret = av_new_packet(&new_pkt, q->packet_size);
  702. if (ret < 0) {
  703. av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n");
  704. return ret;
  705. }
  706. bs = av_mallocz(sizeof(*bs));
  707. if (!bs) {
  708. av_packet_unref(&new_pkt);
  709. return AVERROR(ENOMEM);
  710. }
  711. bs->Data = new_pkt.data;
  712. bs->MaxLength = new_pkt.size;
  713. do {
  714. ret = MFXVideoENCODE_EncodeFrameAsync(q->session, NULL, surf, bs, &sync);
  715. if (ret == MFX_WRN_DEVICE_BUSY)
  716. av_usleep(1);
  717. } while (ret > 0);
  718. if (ret < 0) {
  719. av_packet_unref(&new_pkt);
  720. av_freep(&bs);
  721. return (ret == MFX_ERR_MORE_DATA) ? 0 : ff_qsv_error(ret);
  722. }
  723. if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame)
  724. print_interlace_msg(avctx, q);
  725. if (sync) {
  726. av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
  727. av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
  728. av_fifo_generic_write(q->async_fifo, &bs, sizeof(bs), NULL);
  729. } else {
  730. av_packet_unref(&new_pkt);
  731. av_freep(&bs);
  732. }
  733. if (!av_fifo_space(q->async_fifo) ||
  734. (!frame && av_fifo_size(q->async_fifo))) {
  735. av_fifo_generic_read(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
  736. av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
  737. av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);
  738. MFXVideoCORE_SyncOperation(q->session, sync, 60000);
  739. new_pkt.dts = av_rescale_q(bs->DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
  740. new_pkt.pts = av_rescale_q(bs->TimeStamp, (AVRational){1, 90000}, avctx->time_base);
  741. new_pkt.size = bs->DataLength;
  742. if (bs->FrameType & MFX_FRAMETYPE_IDR ||
  743. bs->FrameType & MFX_FRAMETYPE_xIDR)
  744. new_pkt.flags |= AV_PKT_FLAG_KEY;
  745. #if FF_API_CODED_FRAME
  746. FF_DISABLE_DEPRECATION_WARNINGS
  747. if (bs->FrameType & MFX_FRAMETYPE_I || bs->FrameType & MFX_FRAMETYPE_xI)
  748. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
  749. else if (bs->FrameType & MFX_FRAMETYPE_P || bs->FrameType & MFX_FRAMETYPE_xP)
  750. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
  751. else if (bs->FrameType & MFX_FRAMETYPE_B || bs->FrameType & MFX_FRAMETYPE_xB)
  752. avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
  753. FF_ENABLE_DEPRECATION_WARNINGS
  754. #endif
  755. av_freep(&bs);
  756. if (pkt->data) {
  757. if (pkt->size < new_pkt.size) {
  758. av_log(avctx, AV_LOG_ERROR, "Submitted buffer not large enough: %d < %d\n",
  759. pkt->size, new_pkt.size);
  760. av_packet_unref(&new_pkt);
  761. return AVERROR(EINVAL);
  762. }
  763. memcpy(pkt->data, new_pkt.data, new_pkt.size);
  764. pkt->size = new_pkt.size;
  765. ret = av_packet_copy_props(pkt, &new_pkt);
  766. av_packet_unref(&new_pkt);
  767. if (ret < 0)
  768. return ret;
  769. } else
  770. *pkt = new_pkt;
  771. *got_packet = 1;
  772. }
  773. return 0;
  774. }
  775. int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q)
  776. {
  777. QSVFrame *cur;
  778. if (q->session)
  779. MFXVideoENCODE_Close(q->session);
  780. if (q->internal_session)
  781. MFXClose(q->internal_session);
  782. q->session = NULL;
  783. q->internal_session = NULL;
  784. cur = q->work_frames;
  785. while (cur) {
  786. q->work_frames = cur->next;
  787. av_frame_free(&cur->frame);
  788. av_freep(&cur);
  789. cur = q->work_frames;
  790. }
  791. while (q->async_fifo && av_fifo_size(q->async_fifo)) {
  792. AVPacket pkt;
  793. mfxSyncPoint sync;
  794. mfxBitstream *bs;
  795. av_fifo_generic_read(q->async_fifo, &pkt, sizeof(pkt), NULL);
  796. av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
  797. av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);
  798. av_freep(&bs);
  799. av_packet_unref(&pkt);
  800. }
  801. av_fifo_free(q->async_fifo);
  802. q->async_fifo = NULL;
  803. av_freep(&q->opaque_surfaces);
  804. av_buffer_unref(&q->opaque_alloc_buf);
  805. av_freep(&q->extparam);
  806. return 0;
  807. }