You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1352 lines
47KB

  1. /*
  2. * Intel MediaSDK QSV encoder utility functions
  3. *
  4. * copyright (c) 2013 Yukinori Yamazoe
  5. * copyright (c) 2015 Anton Khirnov
  6. *
  7. * This file is part of Libav.
  8. *
  9. * Libav is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * Libav is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with Libav; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <string.h>
  24. #include <sys/types.h>
  25. #include <mfx/mfxvideo.h>
  26. #include "libavutil/common.h"
  27. #include "libavutil/hwcontext.h"
  28. #include "libavutil/hwcontext_qsv.h"
  29. #include "libavutil/mem.h"
  30. #include "libavutil/log.h"
  31. #include "libavutil/time.h"
  32. #include "libavutil/imgutils.h"
  33. #include "avcodec.h"
  34. #include "internal.h"
  35. #include "qsv.h"
  36. #include "qsv_internal.h"
  37. #include "qsvenc.h"
/* Mapping from MFX codec profile IDs to human-readable names, consumed by
 * print_profile() below. One flat table serves all codecs handled by this
 * encoder (H.264/AVC, MPEG-2, VC-1, HEVC); entries for profiles introduced
 * in newer SDK releases are guarded by QSV_VERSION_ATLEAST checks so the
 * file still builds against older Media SDK headers. */
static const struct {
    mfxU16 profile;
    const char *name;
} profile_names[] = {
    { MFX_PROFILE_AVC_BASELINE,             "baseline"             },
    { MFX_PROFILE_AVC_MAIN,                 "main"                 },
    { MFX_PROFILE_AVC_EXTENDED,             "extended"             },
    { MFX_PROFILE_AVC_HIGH,                 "high"                 },
#if QSV_VERSION_ATLEAST(1, 15)
    { MFX_PROFILE_AVC_HIGH_422,             "high 422"             },
#endif
#if QSV_VERSION_ATLEAST(1, 4)
    { MFX_PROFILE_AVC_CONSTRAINED_BASELINE, "constrained baseline" },
    { MFX_PROFILE_AVC_CONSTRAINED_HIGH,     "constrained high"     },
    { MFX_PROFILE_AVC_PROGRESSIVE_HIGH,     "progressive high"     },
#endif
    { MFX_PROFILE_MPEG2_SIMPLE,             "simple"               },
    { MFX_PROFILE_MPEG2_MAIN,               "main"                 },
    { MFX_PROFILE_MPEG2_HIGH,               "high"                 },
    { MFX_PROFILE_VC1_SIMPLE,               "simple"               },
    { MFX_PROFILE_VC1_MAIN,                 "main"                 },
    { MFX_PROFILE_VC1_ADVANCED,             "advanced"             },
#if QSV_VERSION_ATLEAST(1, 8)
    { MFX_PROFILE_HEVC_MAIN,                "main"                 },
    { MFX_PROFILE_HEVC_MAIN10,              "main10"               },
    { MFX_PROFILE_HEVC_MAINSP,              "mainsp"               },
#endif
};
  66. static const char *print_profile(mfxU16 profile)
  67. {
  68. int i;
  69. for (i = 0; i < FF_ARRAY_ELEMS(profile_names); i++)
  70. if (profile == profile_names[i].profile)
  71. return profile_names[i].name;
  72. return "unknown";
  73. }
/* Mapping from MFX rate-control mode IDs to short display names, consumed
 * by print_ratecontrol() below. Modes that only exist in newer SDK
 * releases are guarded by the corresponding QSV_HAVE_* / version macros. */
static const struct {
    mfxU16 rc_mode;
    const char *name;
} rc_names[] = {
    { MFX_RATECONTROL_CBR,    "CBR"    },
    { MFX_RATECONTROL_VBR,    "VBR"    },
    { MFX_RATECONTROL_CQP,    "CQP"    },
#if QSV_HAVE_AVBR
    { MFX_RATECONTROL_AVBR,   "AVBR"   },
#endif
#if QSV_HAVE_LA
    { MFX_RATECONTROL_LA,     "LA"     },
#endif
#if QSV_HAVE_ICQ
    { MFX_RATECONTROL_ICQ,    "ICQ"    },
    { MFX_RATECONTROL_LA_ICQ, "LA_ICQ" },
#endif
#if QSV_HAVE_VCM
    { MFX_RATECONTROL_VCM,    "VCM"    },
#endif
#if QSV_VERSION_ATLEAST(1, 10)
    { MFX_RATECONTROL_LA_EXT, "LA_EXT" },
#endif
#if QSV_HAVE_LA_HRD
    { MFX_RATECONTROL_LA_HRD, "LA_HRD" },
#endif
#if QSV_HAVE_QVBR
    { MFX_RATECONTROL_QVBR,   "QVBR"   },
#endif
};
  104. static const char *print_ratecontrol(mfxU16 rc_mode)
  105. {
  106. int i;
  107. for (i = 0; i < FF_ARRAY_ELEMS(rc_names); i++)
  108. if (rc_mode == rc_names[i].rc_mode)
  109. return rc_names[i].name;
  110. return "unknown";
  111. }
  112. static const char *print_threestate(mfxU16 val)
  113. {
  114. if (val == MFX_CODINGOPTION_ON)
  115. return "ON";
  116. else if (val == MFX_CODINGOPTION_OFF)
  117. return "OFF";
  118. return "unknown";
  119. }
  120. static void dump_video_param(AVCodecContext *avctx, QSVEncContext *q,
  121. mfxExtBuffer **coding_opts)
  122. {
  123. mfxInfoMFX *info = &q->param.mfx;
  124. mfxExtCodingOption *co = (mfxExtCodingOption*)coding_opts[0];
  125. #if QSV_HAVE_CO2
  126. mfxExtCodingOption2 *co2 = (mfxExtCodingOption2*)coding_opts[1];
  127. #endif
  128. #if QSV_HAVE_CO3 && QSV_HAVE_QVBR
  129. mfxExtCodingOption3 *co3 = (mfxExtCodingOption3*)coding_opts[2];
  130. #endif
  131. av_log(avctx, AV_LOG_VERBOSE, "profile: %s; level: %"PRIu16"\n",
  132. print_profile(info->CodecProfile), info->CodecLevel);
  133. av_log(avctx, AV_LOG_VERBOSE, "GopPicSize: %"PRIu16"; GopRefDist: %"PRIu16"; GopOptFlag: ",
  134. info->GopPicSize, info->GopRefDist);
  135. if (info->GopOptFlag & MFX_GOP_CLOSED)
  136. av_log(avctx, AV_LOG_VERBOSE, "closed ");
  137. if (info->GopOptFlag & MFX_GOP_STRICT)
  138. av_log(avctx, AV_LOG_VERBOSE, "strict ");
  139. av_log(avctx, AV_LOG_VERBOSE, "; IdrInterval: %"PRIu16"\n", info->IdrInterval);
  140. av_log(avctx, AV_LOG_VERBOSE, "TargetUsage: %"PRIu16"; RateControlMethod: %s\n",
  141. info->TargetUsage, print_ratecontrol(info->RateControlMethod));
  142. if (info->RateControlMethod == MFX_RATECONTROL_CBR ||
  143. info->RateControlMethod == MFX_RATECONTROL_VBR
  144. #if QSV_HAVE_VCM
  145. || info->RateControlMethod == MFX_RATECONTROL_VCM
  146. #endif
  147. ) {
  148. av_log(avctx, AV_LOG_VERBOSE,
  149. "BufferSizeInKB: %"PRIu16"; InitialDelayInKB: %"PRIu16"; TargetKbps: %"PRIu16"; MaxKbps: %"PRIu16"\n",
  150. info->BufferSizeInKB, info->InitialDelayInKB, info->TargetKbps, info->MaxKbps);
  151. } else if (info->RateControlMethod == MFX_RATECONTROL_CQP) {
  152. av_log(avctx, AV_LOG_VERBOSE, "QPI: %"PRIu16"; QPP: %"PRIu16"; QPB: %"PRIu16"\n",
  153. info->QPI, info->QPP, info->QPB);
  154. }
  155. #if QSV_HAVE_AVBR
  156. else if (info->RateControlMethod == MFX_RATECONTROL_AVBR) {
  157. av_log(avctx, AV_LOG_VERBOSE,
  158. "TargetKbps: %"PRIu16"; Accuracy: %"PRIu16"; Convergence: %"PRIu16"\n",
  159. info->TargetKbps, info->Accuracy, info->Convergence);
  160. }
  161. #endif
  162. #if QSV_HAVE_LA
  163. else if (info->RateControlMethod == MFX_RATECONTROL_LA
  164. #if QSV_HAVE_LA_HRD
  165. || info->RateControlMethod == MFX_RATECONTROL_LA_HRD
  166. #endif
  167. ) {
  168. av_log(avctx, AV_LOG_VERBOSE,
  169. "TargetKbps: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
  170. info->TargetKbps, co2->LookAheadDepth);
  171. }
  172. #endif
  173. #if QSV_HAVE_ICQ
  174. else if (info->RateControlMethod == MFX_RATECONTROL_ICQ) {
  175. av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"\n", info->ICQQuality);
  176. } else if (info->RateControlMethod == MFX_RATECONTROL_LA_ICQ) {
  177. av_log(avctx, AV_LOG_VERBOSE, "ICQQuality: %"PRIu16"; LookAheadDepth: %"PRIu16"\n",
  178. info->ICQQuality, co2->LookAheadDepth);
  179. }
  180. #endif
  181. #if QSV_HAVE_QVBR
  182. else if (info->RateControlMethod == MFX_RATECONTROL_QVBR) {
  183. av_log(avctx, AV_LOG_VERBOSE, "QVBRQuality: %"PRIu16"\n",
  184. co3->QVBRQuality);
  185. }
  186. #endif
  187. av_log(avctx, AV_LOG_VERBOSE, "NumSlice: %"PRIu16"; NumRefFrame: %"PRIu16"\n",
  188. info->NumSlice, info->NumRefFrame);
  189. av_log(avctx, AV_LOG_VERBOSE, "RateDistortionOpt: %s\n",
  190. print_threestate(co->RateDistortionOpt));
  191. #if QSV_HAVE_CO2
  192. av_log(avctx, AV_LOG_VERBOSE,
  193. "RecoveryPointSEI: %s IntRefType: %"PRIu16"; IntRefCycleSize: %"PRIu16"; IntRefQPDelta: %"PRId16"\n",
  194. print_threestate(co->RecoveryPointSEI), co2->IntRefType, co2->IntRefCycleSize, co2->IntRefQPDelta);
  195. av_log(avctx, AV_LOG_VERBOSE, "MaxFrameSize: %"PRIu16"; ", co2->MaxFrameSize);
  196. #if QSV_HAVE_MAX_SLICE_SIZE
  197. av_log(avctx, AV_LOG_VERBOSE, "MaxSliceSize: %"PRIu16"; ", co2->MaxSliceSize);
  198. #endif
  199. av_log(avctx, AV_LOG_VERBOSE, "\n");
  200. av_log(avctx, AV_LOG_VERBOSE,
  201. "BitrateLimit: %s; MBBRC: %s; ExtBRC: %s\n",
  202. print_threestate(co2->BitrateLimit), print_threestate(co2->MBBRC),
  203. print_threestate(co2->ExtBRC));
  204. #if QSV_HAVE_TRELLIS
  205. av_log(avctx, AV_LOG_VERBOSE, "Trellis: ");
  206. if (co2->Trellis & MFX_TRELLIS_OFF) {
  207. av_log(avctx, AV_LOG_VERBOSE, "off");
  208. } else if (!co2->Trellis) {
  209. av_log(avctx, AV_LOG_VERBOSE, "auto");
  210. } else {
  211. if (co2->Trellis & MFX_TRELLIS_I) av_log(avctx, AV_LOG_VERBOSE, "I");
  212. if (co2->Trellis & MFX_TRELLIS_P) av_log(avctx, AV_LOG_VERBOSE, "P");
  213. if (co2->Trellis & MFX_TRELLIS_B) av_log(avctx, AV_LOG_VERBOSE, "B");
  214. }
  215. av_log(avctx, AV_LOG_VERBOSE, "\n");
  216. #endif
  217. #if QSV_VERSION_ATLEAST(1, 8)
  218. av_log(avctx, AV_LOG_VERBOSE,
  219. "RepeatPPS: %s; NumMbPerSlice: %"PRIu16"; LookAheadDS: ",
  220. print_threestate(co2->RepeatPPS), co2->NumMbPerSlice);
  221. switch (co2->LookAheadDS) {
  222. case MFX_LOOKAHEAD_DS_OFF: av_log(avctx, AV_LOG_VERBOSE, "off"); break;
  223. case MFX_LOOKAHEAD_DS_2x: av_log(avctx, AV_LOG_VERBOSE, "2x"); break;
  224. case MFX_LOOKAHEAD_DS_4x: av_log(avctx, AV_LOG_VERBOSE, "4x"); break;
  225. default: av_log(avctx, AV_LOG_VERBOSE, "unknown"); break;
  226. }
  227. av_log(avctx, AV_LOG_VERBOSE, "\n");
  228. av_log(avctx, AV_LOG_VERBOSE, "AdaptiveI: %s; AdaptiveB: %s; BRefType: ",
  229. print_threestate(co2->AdaptiveI), print_threestate(co2->AdaptiveB));
  230. switch (co2->BRefType) {
  231. case MFX_B_REF_OFF: av_log(avctx, AV_LOG_VERBOSE, "off"); break;
  232. case MFX_B_REF_PYRAMID: av_log(avctx, AV_LOG_VERBOSE, "pyramid"); break;
  233. default: av_log(avctx, AV_LOG_VERBOSE, "auto"); break;
  234. }
  235. av_log(avctx, AV_LOG_VERBOSE, "\n");
  236. #endif
  237. #if QSV_VERSION_ATLEAST(1, 9)
  238. av_log(avctx, AV_LOG_VERBOSE,
  239. "MinQPI: %"PRIu8"; MaxQPI: %"PRIu8"; MinQPP: %"PRIu8"; MaxQPP: %"PRIu8"; MinQPB: %"PRIu8"; MaxQPB: %"PRIu8"\n",
  240. co2->MinQPI, co2->MaxQPI, co2->MinQPP, co2->MaxQPP, co2->MinQPB, co2->MaxQPB);
  241. #endif
  242. #endif
  243. if (avctx->codec_id == AV_CODEC_ID_H264) {
  244. av_log(avctx, AV_LOG_VERBOSE, "Entropy coding: %s; MaxDecFrameBuffering: %"PRIu16"\n",
  245. co->CAVLC == MFX_CODINGOPTION_ON ? "CAVLC" : "CABAC", co->MaxDecFrameBuffering);
  246. av_log(avctx, AV_LOG_VERBOSE,
  247. "NalHrdConformance: %s; SingleSeiNalUnit: %s; VuiVclHrdParameters: %s VuiNalHrdParameters: %s\n",
  248. print_threestate(co->NalHrdConformance), print_threestate(co->SingleSeiNalUnit),
  249. print_threestate(co->VuiVclHrdParameters), print_threestate(co->VuiNalHrdParameters));
  250. }
  251. }
/**
 * Choose the MFX rate-control mode from the codec context and the
 * encoder's private options, and store it in q->param.mfx.RateControlMethod.
 *
 * Selection priority (first match wins): CQP (qscale flag), VCM,
 * LA / LA_ICQ (lookahead), ICQ (global_quality without qscale),
 * CBR (max rate equals target rate), AVBR (no max rate), otherwise VBR.
 *
 * Returns 0 on success, AVERROR(ENOSYS) when the requested mode is not
 * available in this SDK build, AVERROR(EINVAL) on conflicting requests.
 */
static int select_rc_mode(AVCodecContext *avctx, QSVEncContext *q)
{
    const char *rc_desc;
    mfxU16 rc_mode;

    /* A lookahead depth below 10 is treated as "lookahead not requested";
     * NOTE(review): the threshold 10 is not explained here — presumably the
     * SDK's minimum usable depth; confirm against the option definition. */
    int want_la     = q->la_depth >= 10;
    int want_qscale = !!(avctx->flags & AV_CODEC_FLAG_QSCALE);
    int want_vcm    = q->vcm;

    if (want_la && !QSV_HAVE_LA) {
        av_log(avctx, AV_LOG_ERROR,
               "Lookahead ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }
    if (want_vcm && !QSV_HAVE_VCM) {
        av_log(avctx, AV_LOG_ERROR,
               "VCM ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }

    /* These three requests are mutually exclusive. */
    if (want_la + want_qscale + want_vcm > 1) {
        av_log(avctx, AV_LOG_ERROR,
               "More than one of: { constant qscale, lookahead, VCM } requested, "
               "only one of them can be used at a time.\n");
        return AVERROR(EINVAL);
    }

    /* global_quality without the qscale flag means ICQ was asked for. */
    if (!want_qscale && avctx->global_quality > 0 && !QSV_HAVE_ICQ){
        av_log(avctx, AV_LOG_ERROR,
               "ICQ ratecontrol mode requested, but is not supported by this SDK version\n");
        return AVERROR(ENOSYS);
    }

    if (want_qscale) {
        rc_mode = MFX_RATECONTROL_CQP;
        rc_desc = "constant quantization parameter (CQP)";
    }
#if QSV_HAVE_VCM
    else if (want_vcm) {
        rc_mode = MFX_RATECONTROL_VCM;
        rc_desc = "video conferencing mode (VCM)";
    }
#endif
#if QSV_HAVE_LA
    else if (want_la) {
        rc_mode = MFX_RATECONTROL_LA;
        rc_desc = "VBR with lookahead (LA)";
#if QSV_HAVE_ICQ
        /* lookahead + global_quality upgrades to the ICQ lookahead variant */
        if (avctx->global_quality > 0) {
            rc_mode = MFX_RATECONTROL_LA_ICQ;
            rc_desc = "intelligent constant quality with lookahead (LA_ICQ)";
        }
#endif
    }
#endif
#if QSV_HAVE_ICQ
    else if (avctx->global_quality > 0) {
        rc_mode = MFX_RATECONTROL_ICQ;
        rc_desc = "intelligent constant quality (ICQ)";
    }
#endif
    else if (avctx->rc_max_rate == avctx->bit_rate) {
        rc_mode = MFX_RATECONTROL_CBR;
        rc_desc = "constant bitrate (CBR)";
    }
#if QSV_HAVE_AVBR
    else if (!avctx->rc_max_rate) {
        rc_mode = MFX_RATECONTROL_AVBR;
        rc_desc = "average variable bitrate (AVBR)";
    }
#endif
    else {
        rc_mode = MFX_RATECONTROL_VBR;
        rc_desc = "variable bitrate (VBR)";
    }

    q->param.mfx.RateControlMethod = rc_mode;
    av_log(avctx, AV_LOG_VERBOSE, "Using the %s ratecontrol method\n", rc_desc);

    return 0;
}
/**
 * Validate q->param with MFXVideoENCODE_Query() and, when the SDK rejects
 * it, log an error for each field the SDK modified (a changed field in
 * param_out indicates the requested value was unsupported).
 *
 * Returns 1 when the parameters are accepted, 0 otherwise.
 */
static int check_enc_param(AVCodecContext *avctx, QSVEncContext *q)
{
    mfxVideoParam param_out = { .mfx.CodecId = q->param.mfx.CodecId };
    mfxStatus ret;

/* true when the SDK changed field x in its corrected output */
#define UNMATCH(x) (param_out.mfx.x != q->param.mfx.x)

    ret = MFXVideoENCODE_Query(q->session, &q->param, &param_out);

    /* negative mfxStatus means the configuration was rejected; positive
     * values are SDK warnings and are treated as success here */
    if (ret < 0) {
        if (UNMATCH(CodecId))
            av_log(avctx, AV_LOG_ERROR, "Current codec type is unsupported\n");
        if (UNMATCH(CodecProfile))
            av_log(avctx, AV_LOG_ERROR, "Current profile is unsupported\n");
        if (UNMATCH(RateControlMethod))
            av_log(avctx, AV_LOG_ERROR, "Selected ratecontrol mode is unsupported\n");
        if (UNMATCH(LowPower))
              av_log(avctx, AV_LOG_ERROR, "Low power mode is unsupported\n");
        if (UNMATCH(FrameInfo.FrameRateExtN) || UNMATCH(FrameInfo.FrameRateExtD))
              av_log(avctx, AV_LOG_ERROR, "Current frame rate is unsupported\n");
        if (UNMATCH(FrameInfo.PicStruct))
              av_log(avctx, AV_LOG_ERROR, "Current picture structure is unsupported\n");
        if (UNMATCH(FrameInfo.Width) || UNMATCH(FrameInfo.Height))
              av_log(avctx, AV_LOG_ERROR, "Current resolution is unsupported\n");
        if (UNMATCH(FrameInfo.FourCC))
              av_log(avctx, AV_LOG_ERROR, "Current pixel format is unsupported\n");
        return 0;
    }
    return 1;
}
/**
 * Fill q->param for the MJPEG encoder path from the codec context.
 *
 * Unlike init_video_param(), no rate control or coding-option buffers are
 * configured; MJPEG only needs frame geometry, the Quality factor and the
 * interleaving/restart settings at the bottom.
 *
 * Returns 0 on success, AVERROR_BUG when the codec ID or pixel format
 * cannot be mapped.
 */
static int init_video_param_jpeg(AVCodecContext *avctx, QSVEncContext *q)
{
    /* with hardware (QSV) pixel format the real layout is in sw_pix_fmt */
    enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
                                   avctx->sw_pix_fmt : avctx->pix_fmt;
    const AVPixFmtDescriptor *desc;
    int ret;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;
    q->param.mfx.CodecProfile = q->profile;

    desc = av_pix_fmt_desc_get(sw_format);
    if (!desc)
        return AVERROR_BUG;

    /* NOTE(review): the return value of ff_qsv_map_pixfmt() is ignored
     * here — presumably sw_format is already known-good at this point;
     * confirm against the pix_fmts list of the MJPEG encoder. */
    ff_qsv_map_pixfmt(sw_format, &q->param.mfx.FrameInfo.FourCC);

    q->param.mfx.FrameInfo.CropX          = 0;
    q->param.mfx.FrameInfo.CropY          = 0;
    q->param.mfx.FrameInfo.CropW          = avctx->width;
    q->param.mfx.FrameInfo.CropH          = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    q->param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    /* Shift flags that samples are stored in the high bits (>8-bit input) */
    q->param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;
    q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, 16);
    q->param.mfx.FrameInfo.Height         = FFALIGN(avctx->height, 16);

    /* with pre-allocated hw frames the surface dimensions take precedence */
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        q->param.mfx.FrameInfo.Width  = frames_hwctx->surfaces[0].Info.Width;
        q->param.mfx.FrameInfo.Height = frames_hwctx->surfaces[0].Info.Height;
    }

    /* prefer the explicit framerate; fall back to the inverse time base */
    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
    }

    q->param.mfx.Interleaved     = 1;
    /* MJPEG quality factor is 1..100 */
    q->param.mfx.Quality         = av_clip(avctx->global_quality, 1, 100);
    q->param.mfx.RestartInterval = 0;

    return 0;
}
/**
 * Fill q->param (and the extco/extco2/extmfp ext buffers) for the
 * H.264/HEVC/MPEG-2 encode paths from the codec context and the private
 * options, then validate the result with check_enc_param().
 *
 * Returns 0 on success, AVERROR_BUG on unmappable codec/pixel format,
 * AVERROR(EINVAL)/AVERROR(ENOSYS) on invalid or unsupported settings.
 */
static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
{
    /* with hardware (QSV) pixel format the real layout is in sw_pix_fmt */
    enum AVPixelFormat sw_format = avctx->pix_fmt == AV_PIX_FMT_QSV ?
                                   avctx->sw_pix_fmt : avctx->pix_fmt;
    const AVPixFmtDescriptor *desc;
    float quant;
    int ret;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return AVERROR_BUG;
    q->param.mfx.CodecId = ret;

    if (avctx->level > 0)
        q->param.mfx.CodecLevel = avctx->level;

    /* compression_level doubles as the MFX TargetUsage (1..7);
     * unset means "use the preset option", out-of-range is clamped */
    if (avctx->compression_level == FF_COMPRESSION_DEFAULT) {
        avctx->compression_level = q->preset;
    } else if (avctx->compression_level >= 0) {
        if (avctx->compression_level > MFX_TARGETUSAGE_BEST_SPEED) {
            av_log(avctx, AV_LOG_WARNING, "Invalid compression level: "
                   "valid range is 0-%d, using %d instead\n",
                   MFX_TARGETUSAGE_BEST_SPEED, MFX_TARGETUSAGE_BEST_SPEED);
            avctx->compression_level = MFX_TARGETUSAGE_BEST_SPEED;
        }
    }

    q->param.mfx.CodecProfile       = q->profile;
    q->param.mfx.TargetUsage        = avctx->compression_level;
    q->param.mfx.GopPicSize         = FFMAX(0, avctx->gop_size);
    /* GopRefDist = distance between P frames = max_b_frames + 1 */
    q->param.mfx.GopRefDist         = FFMAX(-1, avctx->max_b_frames) + 1;
    q->param.mfx.GopOptFlag         = avctx->flags & AV_CODEC_FLAG_CLOSED_GOP ?
                                      MFX_GOP_CLOSED : 0;
    q->param.mfx.IdrInterval        = q->idr_interval;
    q->param.mfx.NumSlice           = avctx->slices;
    q->param.mfx.NumRefFrame        = FFMAX(0, avctx->refs);
    q->param.mfx.EncodedOrder       = 0;
    q->param.mfx.BufferSizeInKB     = 0;

    desc = av_pix_fmt_desc_get(sw_format);
    if (!desc)
        return AVERROR_BUG;

    /* NOTE(review): return value of ff_qsv_map_pixfmt() is ignored here —
     * presumably sw_format was validated earlier; confirm with callers. */
    ff_qsv_map_pixfmt(sw_format, &q->param.mfx.FrameInfo.FourCC);

    q->param.mfx.FrameInfo.CropX          = 0;
    q->param.mfx.FrameInfo.CropY          = 0;
    q->param.mfx.FrameInfo.CropW          = avctx->width;
    q->param.mfx.FrameInfo.CropH          = avctx->height;
    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
    q->param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    q->param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    /* Shift flags that samples are stored in the high bits (>8-bit input) */
    q->param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;

    // TODO: detect version of MFX--if the minor version is greater than
    // or equal to 19, then can use the same alignment settings as H.264
    // for HEVC
    q->width_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;
    q->param.mfx.FrameInfo.Width = FFALIGN(avctx->width, q->width_align);

    if (avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT) {
        // it is important that PicStruct be setup correctly from the
        // start--otherwise, encoding doesn't work and results in a bunch
        // of incompatible video parameter errors
        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
        // height alignment always must be 32 for interlaced video
        q->height_align = 32;
    } else {
        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        // for progressive video, the height should be aligned to 16 for
        // H.264.  For HEVC, depending on the version of MFX, it should be
        // either 32 or 16.  The lower number is better if possible.
        q->height_align = avctx->codec_id == AV_CODEC_ID_HEVC ? 32 : 16;
    }
    q->param.mfx.FrameInfo.Height = FFALIGN(avctx->height, q->height_align);

    /* with pre-allocated hw frames the surface dimensions take precedence */
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
        q->param.mfx.FrameInfo.Width  = frames_hwctx->surfaces[0].Info.Width;
        q->param.mfx.FrameInfo.Height = frames_hwctx->surfaces[0].Info.Height;
    }

    /* prefer the explicit framerate; fall back to the inverse time base */
    if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->framerate.den;
    } else {
        q->param.mfx.FrameInfo.FrameRateExtN = avctx->time_base.den;
        q->param.mfx.FrameInfo.FrameRateExtD = avctx->time_base.num;
    }

    ret = select_rc_mode(avctx, q);
    if (ret < 0)
        return ret;

    /* fill in the mode-specific rate-control fields */
    switch (q->param.mfx.RateControlMethod) {
    case MFX_RATECONTROL_CBR:
    case MFX_RATECONTROL_VBR:
#if QSV_HAVE_VCM
    case MFX_RATECONTROL_VCM:
#endif
        /* rc_buffer_size is in bits -> /8000 for KB; the others in bits/s */
        q->param.mfx.BufferSizeInKB   = avctx->rc_buffer_size / 8000;
        q->param.mfx.InitialDelayInKB = avctx->rc_initial_buffer_occupancy / 1000;
        q->param.mfx.TargetKbps       = avctx->bit_rate / 1000;
        q->param.mfx.MaxKbps          = avctx->rc_max_rate / 1000;
        break;
    case MFX_RATECONTROL_CQP:
        /* note: integer division — the fractional part of
         * global_quality / FF_QP2LAMBDA is discarded before the float store */
        quant = avctx->global_quality / FF_QP2LAMBDA;

        q->param.mfx.QPI = av_clip(quant * fabs(avctx->i_quant_factor) + avctx->i_quant_offset, 0, 51);
        q->param.mfx.QPP = av_clip(quant, 0, 51);
        q->param.mfx.QPB = av_clip(quant * fabs(avctx->b_quant_factor) + avctx->b_quant_offset, 0, 51);
        break;
#if QSV_HAVE_AVBR
    case MFX_RATECONTROL_AVBR:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->param.mfx.Convergence = q->avbr_convergence;
        q->param.mfx.Accuracy    = q->avbr_accuracy;
        break;
#endif
#if QSV_HAVE_LA
    case MFX_RATECONTROL_LA:
        q->param.mfx.TargetKbps  = avctx->bit_rate / 1000;
        q->extco2.LookAheadDepth = q->la_depth;
        break;
#if QSV_HAVE_ICQ
    case MFX_RATECONTROL_LA_ICQ:
        q->extco2.LookAheadDepth = q->la_depth;
        /* fallthrough — LA_ICQ also needs ICQQuality */
    case MFX_RATECONTROL_ICQ:
        q->param.mfx.ICQQuality  = avctx->global_quality;
        break;
#endif
#endif
    }

    // the HEVC encoder plugin currently fails if coding options
    // are provided
    if (avctx->codec_id != AV_CODEC_ID_HEVC) {
        q->extco.Header.BufferId      = MFX_EXTBUFF_CODING_OPTION;
        q->extco.Header.BufferSz      = sizeof(q->extco);

        if (q->rdo >= 0)
            q->extco.RateDistortionOpt = q->rdo > 0 ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

        if (avctx->codec_id == AV_CODEC_ID_H264) {
#if FF_API_CODER_TYPE
FF_DISABLE_DEPRECATION_WARNINGS
            /* deprecated coder_type still overrides the cavlc option */
            if (avctx->coder_type >= 0)
                q->cavlc = avctx->coder_type == FF_CODER_TYPE_VLC;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            q->extco.CAVLC = q->cavlc ? MFX_CODINGOPTION_ON
                                      : MFX_CODINGOPTION_UNKNOWN;

            if (avctx->strict_std_compliance != FF_COMPLIANCE_NORMAL)
                q->extco.NalHrdConformance = avctx->strict_std_compliance > FF_COMPLIANCE_NORMAL ?
                                             MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

            if (q->single_sei_nal_unit >= 0)
                q->extco.SingleSeiNalUnit = q->single_sei_nal_unit ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->recovery_point_sei >= 0)
                q->extco.RecoveryPointSEI = q->recovery_point_sei ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            q->extco.MaxDecFrameBuffering = q->max_dec_frame_buffering;
            q->extco.AUDelimiter          = q->aud ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
        }

        q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco;

#if QSV_HAVE_CO2
        if (avctx->codec_id == AV_CODEC_ID_H264) {
            q->extco2.Header.BufferId = MFX_EXTBUFF_CODING_OPTION2;
            q->extco2.Header.BufferSz = sizeof(q->extco2);

            /* for the option-driven fields, a negative value means
             * "unset" and leaves the SDK default in place */
            if (q->int_ref_type >= 0)
                q->extco2.IntRefType = q->int_ref_type;
            if (q->int_ref_cycle_size >= 0)
                q->extco2.IntRefCycleSize = q->int_ref_cycle_size;
            if (q->int_ref_qp_delta != INT16_MIN)
                q->extco2.IntRefQPDelta = q->int_ref_qp_delta;

            if (q->bitrate_limit >= 0)
                q->extco2.BitrateLimit = q->bitrate_limit ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->mbbrc >= 0)
                q->extco2.MBBRC = q->mbbrc ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->extbrc >= 0)
                q->extco2.ExtBRC = q->extbrc ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;

            if (q->max_frame_size >= 0)
                q->extco2.MaxFrameSize = q->max_frame_size;
#if QSV_HAVE_MAX_SLICE_SIZE
            if (q->max_slice_size >= 0)
                q->extco2.MaxSliceSize = q->max_slice_size;
#endif

#if QSV_HAVE_TRELLIS
            q->extco2.Trellis = q->trellis;
#endif

#if QSV_HAVE_LA_DS
            q->extco2.LookAheadDS = q->la_ds;
#endif

#if QSV_HAVE_BREF_TYPE
#if FF_API_PRIVATE_OPT
FF_DISABLE_DEPRECATION_WARNINGS
            /* deprecated b_frame_strategy still overrides the option */
            if (avctx->b_frame_strategy >= 0)
                q->b_strategy = avctx->b_frame_strategy;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            if (q->b_strategy >= 0)
                q->extco2.BRefType = q->b_strategy ? MFX_B_REF_PYRAMID : MFX_B_REF_OFF;
            if (q->adaptive_i >= 0)
                q->extco2.AdaptiveI = q->adaptive_i ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
            if (q->adaptive_b >= 0)
                q->extco2.AdaptiveB = q->adaptive_b ? MFX_CODINGOPTION_ON : MFX_CODINGOPTION_OFF;
#endif

#if QSV_VERSION_ATLEAST(1, 9)
            if (avctx->qmin >= 0 && avctx->qmax >= 0 && avctx->qmin > avctx->qmax) {
                av_log(avctx, AV_LOG_ERROR, "qmin and or qmax are set but invalid, please make sure min <= max\n");
                return AVERROR(EINVAL);
            }
            /* qmin/qmax apply to all frame types, clamped to H.264's 0..51 */
            if (avctx->qmin >= 0) {
                q->extco2.MinQPI = avctx->qmin > 51 ? 51 : avctx->qmin;
                q->extco2.MinQPP = q->extco2.MinQPB = q->extco2.MinQPI;
            }
            if (avctx->qmax >= 0) {
                q->extco2.MaxQPI = avctx->qmax > 51 ? 51 : avctx->qmax;
                q->extco2.MaxQPP = q->extco2.MaxQPB = q->extco2.MaxQPI;
            }
#endif
            q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco2;
        }
#endif

#if QSV_HAVE_MF
        /* multi-frame encode needs runtime >= 1.25, so check at runtime */
        if (avctx->codec_id == AV_CODEC_ID_H264) {
            mfxVersion ver;
            ret = MFXQueryVersion(q->session,&ver);
            if (ret >= MFX_ERR_NONE && QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
                q->extmfp.Header.BufferId     = MFX_EXTBUFF_MULTI_FRAME_PARAM;
                q->extmfp.Header.BufferSz     = sizeof(q->extmfp);

                q->extmfp.MFMode = q->mfmode;
                av_log(avctx,AV_LOG_VERBOSE,"MFMode:%d\n", q->extmfp.MFMode);
                q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extmfp;
            }
        }
#endif
    }

    if (!check_enc_param(avctx,q)) {
        av_log(avctx, AV_LOG_ERROR,
               "some encoding parameters are not supported by the QSV "
               "runtime. Please double check the input parameters.\n");
        return AVERROR(ENOSYS);
    }

    return 0;
}
  630. static int qsv_retrieve_enc_jpeg_params(AVCodecContext *avctx, QSVEncContext *q)
  631. {
  632. int ret = 0;
  633. ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
  634. if (ret < 0)
  635. return ff_qsv_print_error(avctx, ret,
  636. "Error calling GetVideoParam");
  637. q->packet_size = q->param.mfx.BufferSizeInKB * 1000;
  638. // for qsv mjpeg the return value maybe 0 so alloc the buffer
  639. if (q->packet_size == 0)
  640. q->packet_size = q->param.mfx.FrameInfo.Height * q->param.mfx.FrameInfo.Width * 4;
  641. return 0;
  642. }
/**
 * Query the final encode parameters back from the SDK, extract the SPS/PPS
 * headers into avctx->extradata, publish CPB properties as side data and
 * dump the parameters at verbose level.
 *
 * Returns 0 on success, a negative AVERROR otherwise.
 */
static int qsv_retrieve_enc_params(AVCodecContext *avctx, QSVEncContext *q)
{
    AVCPBProperties *cpb_props;

    /* stack buffers receiving the raw SPS/PPS from the SDK; they are
     * copied into heap-allocated extradata before this function returns */
    uint8_t sps_buf[128];
    uint8_t pps_buf[128];

    mfxExtCodingOptionSPSPPS extradata = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION_SPSPPS,
        .Header.BufferSz = sizeof(extradata),
        .SPSBuffer = sps_buf, .SPSBufSize = sizeof(sps_buf),
        .PPSBuffer = pps_buf, .PPSBufSize = sizeof(pps_buf)
    };

    mfxExtCodingOption co = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION,
        .Header.BufferSz = sizeof(co),
    };
#if QSV_HAVE_CO2
    mfxExtCodingOption2 co2 = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION2,
        .Header.BufferSz = sizeof(co2),
    };
#endif
#if QSV_HAVE_CO3
    mfxExtCodingOption3 co3 = {
        .Header.BufferId = MFX_EXTBUFF_CODING_OPTION3,
        .Header.BufferSz = sizeof(co3),
    };
#endif

    /* order matters: dump_video_param() receives ext_buffers + 1 and
     * indexes co/co2/co3 by position */
    mfxExtBuffer *ext_buffers[] = {
        (mfxExtBuffer*)&extradata,
        (mfxExtBuffer*)&co,
#if QSV_HAVE_CO2
        (mfxExtBuffer*)&co2,
#endif
#if QSV_HAVE_CO3
        (mfxExtBuffer*)&co3,
#endif
    };

    /* MPEG-2 carries no separate PPS; everything else written here does */
    int need_pps = avctx->codec_id != AV_CODEC_ID_MPEG2VIDEO;
    int ret;

    q->param.ExtParam    = ext_buffers;
    q->param.NumExtParam = FF_ARRAY_ELEMS(ext_buffers);

    ret = MFXVideoENCODE_GetVideoParam(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error calling GetVideoParam");

    q->packet_size = q->param.mfx.BufferSizeInKB * 1000;

    if (!extradata.SPSBufSize || (need_pps && !extradata.PPSBufSize)) {
        av_log(avctx, AV_LOG_ERROR, "No extradata returned from libmfx.\n");
        return AVERROR_UNKNOWN;
    }

    avctx->extradata = av_malloc(extradata.SPSBufSize + need_pps * extradata.PPSBufSize +
                                 AV_INPUT_BUFFER_PADDING_SIZE);
    if (!avctx->extradata)
        return AVERROR(ENOMEM);

    /* extradata = SPS, then PPS (when present), then zeroed padding */
    memcpy(avctx->extradata, sps_buf, extradata.SPSBufSize);
    if (need_pps)
        memcpy(avctx->extradata + extradata.SPSBufSize, pps_buf, extradata.PPSBufSize);
    avctx->extradata_size = extradata.SPSBufSize + need_pps * extradata.PPSBufSize;
    memset(avctx->extradata + avctx->extradata_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    cpb_props = ff_add_cpb_side_data(avctx);
    if (!cpb_props)
        return AVERROR(ENOMEM);
    cpb_props->max_bitrate = avctx->rc_max_rate;
    cpb_props->min_bitrate = avctx->rc_min_rate;
    cpb_props->avg_bitrate = avctx->bit_rate;
    cpb_props->buffer_size = avctx->rc_buffer_size;

    /* skip the SPS/PPS buffer: dump expects co as the first entry */
    dump_video_param(avctx, q, ext_buffers + 1);

    return 0;
}
  712. static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q)
  713. {
  714. AVQSVContext *qsv = avctx->hwaccel_context;
  715. mfxFrameSurface1 *surfaces;
  716. int nb_surfaces, i;
  717. nb_surfaces = qsv->nb_opaque_surfaces + q->req.NumFrameSuggested;
  718. q->opaque_alloc_buf = av_buffer_allocz(sizeof(*surfaces) * nb_surfaces);
  719. if (!q->opaque_alloc_buf)
  720. return AVERROR(ENOMEM);
  721. q->opaque_surfaces = av_malloc_array(nb_surfaces, sizeof(*q->opaque_surfaces));
  722. if (!q->opaque_surfaces)
  723. return AVERROR(ENOMEM);
  724. surfaces = (mfxFrameSurface1*)q->opaque_alloc_buf->data;
  725. for (i = 0; i < nb_surfaces; i++) {
  726. surfaces[i].Info = q->req.Info;
  727. q->opaque_surfaces[i] = surfaces + i;
  728. }
  729. q->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
  730. q->opaque_alloc.Header.BufferSz = sizeof(q->opaque_alloc);
  731. q->opaque_alloc.In.Surfaces = q->opaque_surfaces;
  732. q->opaque_alloc.In.NumSurface = nb_surfaces;
  733. q->opaque_alloc.In.Type = q->req.Type;
  734. q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->opaque_alloc;
  735. qsv->nb_opaque_surfaces = nb_surfaces;
  736. qsv->opaque_surfaces = q->opaque_alloc_buf;
  737. qsv->opaque_alloc_type = q->req.Type;
  738. return 0;
  739. }
  740. static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q)
  741. {
  742. int ret;
  743. if (avctx->hwaccel_context) {
  744. AVQSVContext *qsv = avctx->hwaccel_context;
  745. q->session = qsv->session;
  746. } else if (avctx->hw_frames_ctx) {
  747. q->frames_ctx.hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
  748. if (!q->frames_ctx.hw_frames_ctx)
  749. return AVERROR(ENOMEM);
  750. ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
  751. &q->frames_ctx, q->load_plugins,
  752. q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY);
  753. if (ret < 0) {
  754. av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
  755. return ret;
  756. }
  757. q->session = q->internal_session;
  758. } else if (avctx->hw_device_ctx) {
  759. ret = ff_qsv_init_session_device(avctx, &q->internal_session,
  760. avctx->hw_device_ctx, q->load_plugins);
  761. if (ret < 0)
  762. return ret;
  763. q->session = q->internal_session;
  764. } else {
  765. ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
  766. q->load_plugins);
  767. if (ret < 0)
  768. return ret;
  769. q->session = q->internal_session;
  770. }
  771. return 0;
  772. }
  773. static inline unsigned int qsv_fifo_item_size(void)
  774. {
  775. return sizeof(AVPacket) + sizeof(mfxSyncPoint*) + sizeof(mfxBitstream*);
  776. }
  777. static inline unsigned int qsv_fifo_size(const AVFifoBuffer* fifo)
  778. {
  779. return av_fifo_size(fifo)/qsv_fifo_item_size();
  780. }
/**
 * Initialize the QSV encoder: allocate the async fifo, decide the input
 * memory IOPattern, create/select the MFX session, build and validate the
 * encoding parameters, initialize the encoder and read back the parameters
 * actually selected by the runtime.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
{
    int iopattern = 0;
    int opaque_alloc = 0;
    int ret;

    q->param.AsyncDepth = q->async_depth;

    /* one (AVPacket, sync point, bitstream) triple per in-flight encode */
    q->async_fifo = av_fifo_alloc(q->async_depth * qsv_fifo_item_size());
    if (!q->async_fifo)
        return AVERROR(ENOMEM);

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;

        iopattern    = qsv->iopattern;
        opaque_alloc = qsv->opaque_alloc;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext    *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        /* derive the input memory type from the frames context, unless the
         * caller already forced one via hwaccel_context above */
        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type &
                     (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
                iopattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
        }
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    q->param.IOPattern = iopattern;

    ret = qsvenc_init_session(avctx, q);
    if (ret < 0)
        return ret;

    // in the mfxInfoMFX struct, JPEG is different from other codecs
    switch (avctx->codec_id) {
    case AV_CODEC_ID_MJPEG:
        ret = init_video_param_jpeg(avctx, q);
        break;
    default:
        ret = init_video_param(avctx, q);
        break;
    }
    if (ret < 0)
        return ret;

    /* let the runtime validate/correct the parameters in place */
    ret = MFXVideoENCODE_Query(q->session, &q->param, &q->param);
    if (ret == MFX_WRN_PARTIAL_ACCELERATION) {
        av_log(avctx, AV_LOG_WARNING, "Encoder will work with partial HW acceleration\n");
    } else if (ret < 0) {
        return ff_qsv_print_error(avctx, ret,
                                  "Error querying encoder params");
    }

    /* fills q->req with the number/type of surfaces the encoder needs */
    ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error querying (IOSurf) the encoding parameters");

    if (opaque_alloc) {
        ret = qsv_init_opaque_alloc(avctx, q);
        if (ret < 0)
            return ret;
    }

    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;
        int i, j;

        /* merge the user-supplied ext buffers with the internal ones;
         * on a BufferId collision the user's buffer wins and the internal
         * one is dropped */
        q->extparam = av_mallocz_array(qsv->nb_ext_buffers + q->nb_extparam_internal,
                                       sizeof(*q->extparam));
        if (!q->extparam)
            return AVERROR(ENOMEM);

        q->param.ExtParam = q->extparam;
        for (i = 0; i < qsv->nb_ext_buffers; i++)
            q->param.ExtParam[i] = qsv->ext_buffers[i];
        q->param.NumExtParam = qsv->nb_ext_buffers;

        for (i = 0; i < q->nb_extparam_internal; i++) {
            for (j = 0; j < qsv->nb_ext_buffers; j++) {
                if (qsv->ext_buffers[j]->BufferId == q->extparam_internal[i]->BufferId)
                    break;
            }
            if (j < qsv->nb_ext_buffers)
                continue;

            q->param.ExtParam[q->param.NumExtParam++] = q->extparam_internal[i];
        }
    } else {
        /* no user buffers: use the internal array directly */
        q->param.ExtParam    = q->extparam_internal;
        q->param.NumExtParam = q->nb_extparam_internal;
    }

    ret = MFXVideoENCODE_Init(q->session, &q->param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the encoder");
    else if (ret > 0)
        ff_qsv_print_warning(avctx, ret,
                             "Warning in encoder initialization");

    /* read back the parameters the runtime actually selected; MJPEG uses a
     * separate retrieval path */
    switch (avctx->codec_id) {
    case AV_CODEC_ID_MJPEG:
        ret = qsv_retrieve_enc_jpeg_params(avctx, q);
        break;
    default:
        ret = qsv_retrieve_enc_params(avctx, q);
        break;
    }
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error retrieving encoding parameters.\n");
        return ret;
    }

    q->avctx = avctx;

    return 0;
}
  885. static void clear_unused_frames(QSVEncContext *q)
  886. {
  887. QSVFrame *cur = q->work_frames;
  888. while (cur) {
  889. if (cur->used && !cur->surface.Data.Locked) {
  890. if (cur->frame->format == AV_PIX_FMT_QSV) {
  891. av_frame_unref(cur->frame);
  892. }
  893. cur->used = 0;
  894. }
  895. cur = cur->next;
  896. }
  897. }
  898. static int get_free_frame(QSVEncContext *q, QSVFrame **f)
  899. {
  900. QSVFrame *frame, **last;
  901. clear_unused_frames(q);
  902. frame = q->work_frames;
  903. last = &q->work_frames;
  904. while (frame) {
  905. if (!frame->used) {
  906. *f = frame;
  907. frame->used = 1;
  908. return 0;
  909. }
  910. last = &frame->next;
  911. frame = frame->next;
  912. }
  913. frame = av_mallocz(sizeof(*frame));
  914. if (!frame)
  915. return AVERROR(ENOMEM);
  916. frame->frame = av_frame_alloc();
  917. if (!frame->frame) {
  918. av_freep(&frame);
  919. return AVERROR(ENOMEM);
  920. }
  921. *last = frame;
  922. *f = frame;
  923. frame->used = 1;
  924. return 0;
  925. }
/**
 * Wrap an input AVFrame into an mfxFrameSurface1 the encoder can consume.
 *
 * For AV_PIX_FMT_QSV input the surface embedded in the frame is used
 * directly (with the MemId remapped through the mids table when present).
 * For system-memory input the frame data is either referenced or, when its
 * padding/layout does not satisfy libmfx's alignment requirements, copied
 * into an internally allocated, properly aligned buffer.
 *
 * @param surface on success, points to the surface to pass to libmfx
 * @return 0 on success, a negative AVERROR code on failure
 */
static int submit_frame(QSVEncContext *q, const AVFrame *frame,
                        mfxFrameSurface1 **surface)
{
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(q, &qf);
    if (ret < 0)
        return ret;

    if (frame->format == AV_PIX_FMT_QSV) {
        ret = av_frame_ref(qf->frame, frame);
        if (ret < 0)
            return ret;

        /* hwaccel frames carry the mfx surface in data[3]; take a copy so
         * we can patch its fields without touching the shared original */
        qf->surface = *(mfxFrameSurface1*)qf->frame->data[3];

        if (q->frames_ctx.mids) {
            ret = ff_qsv_find_surface_idx(&q->frames_ctx, qf);
            if (ret < 0)
                return ret;

            qf->surface.Data.MemId = &q->frames_ctx.mids[ret];
        }
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        /* and to make allocation continious for data[0]/data[1] */
        /* NOTE(review): the layout check below reads qf->frame->height,
         * which is 0 for a freshly allocated work frame and otherwise the
         * height of whatever frame previously occupied this slot — confirm
         * whether frame->height was intended here */
        if ((frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) ||
            (frame->data[1] - frame->data[0] != frame->linesize[0] * FFALIGN(qf->frame->height, q->height_align))) {
            /* temporarily set the aligned dimensions so the buffer is
             * allocated with the required padding */
            qf->frame->height = FFALIGN(frame->height, q->height_align);
            qf->frame->width  = FFALIGN(frame->width, q->width_align);
            qf->frame->format = frame->format;

            if (!qf->frame->data[0]) {
                ret = av_frame_get_buffer(qf->frame, q->width_align);
                if (ret < 0)
                    return ret;
            }

            /* restore the real dimensions before copying the pixels */
            qf->frame->height = frame->height;
            qf->frame->width  = frame->width;
            ret = av_frame_copy(qf->frame, frame);
            if (ret < 0) {
                av_frame_unref(qf->frame);
                return ret;
            }
        } else {
            /* layout is acceptable: just reference the input data */
            ret = av_frame_ref(qf->frame, frame);
            if (ret < 0)
                return ret;
        }

        qf->surface.Info = q->param.mfx.FrameInfo;

        qf->surface.Info.PicStruct =
            !frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            frame->top_field_first   ? MFX_PICSTRUCT_FIELD_TFF :
                                       MFX_PICSTRUCT_FIELD_BFF;
        if (frame->repeat_pict == 1)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
        else if (frame->repeat_pict == 2)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
        else if (frame->repeat_pict == 4)
            qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

        qf->surface.Data.PitchLow  = qf->frame->linesize[0];
        qf->surface.Data.Y         = qf->frame->data[0];
        qf->surface.Data.UV        = qf->frame->data[1];
    }

    /* libmfx timestamps are in a fixed 90 kHz timebase */
    qf->surface.Data.TimeStamp = av_rescale_q(frame->pts, q->avctx->time_base, (AVRational){1, 90000});

    *surface = &qf->surface;

    return 0;
}
  989. static void print_interlace_msg(AVCodecContext *avctx, QSVEncContext *q)
  990. {
  991. if (q->param.mfx.CodecId == MFX_CODEC_AVC) {
  992. if (q->param.mfx.CodecProfile == MFX_PROFILE_AVC_BASELINE ||
  993. q->param.mfx.CodecLevel < MFX_LEVEL_AVC_21 ||
  994. q->param.mfx.CodecLevel > MFX_LEVEL_AVC_41)
  995. av_log(avctx, AV_LOG_WARNING,
  996. "Interlaced coding is supported"
  997. " at Main/High Profile Level 2.2-4.0\n");
  998. }
  999. }
  1000. static int encode_frame(AVCodecContext *avctx, QSVEncContext *q,
  1001. const AVFrame *frame)
  1002. {
  1003. AVPacket new_pkt = { 0 };
  1004. mfxBitstream *bs;
  1005. mfxFrameSurface1 *surf = NULL;
  1006. mfxSyncPoint *sync = NULL;
  1007. int ret;
  1008. if (frame) {
  1009. ret = submit_frame(q, frame, &surf);
  1010. if (ret < 0) {
  1011. av_log(avctx, AV_LOG_ERROR, "Error submitting the frame for encoding.\n");
  1012. return ret;
  1013. }
  1014. }
  1015. ret = av_new_packet(&new_pkt, q->packet_size);
  1016. if (ret < 0) {
  1017. av_log(avctx, AV_LOG_ERROR, "Error allocating the output packet\n");
  1018. return ret;
  1019. }
  1020. bs = av_mallocz(sizeof(*bs));
  1021. if (!bs) {
  1022. av_packet_unref(&new_pkt);
  1023. return AVERROR(ENOMEM);
  1024. }
  1025. bs->Data = new_pkt.data;
  1026. bs->MaxLength = new_pkt.size;
  1027. sync = av_mallocz(sizeof(*sync));
  1028. if (!sync) {
  1029. av_freep(&bs);
  1030. av_packet_unref(&new_pkt);
  1031. return AVERROR(ENOMEM);
  1032. }
  1033. do {
  1034. ret = MFXVideoENCODE_EncodeFrameAsync(q->session, NULL, surf, bs, sync);
  1035. if (ret == MFX_WRN_DEVICE_BUSY)
  1036. av_usleep(1);
  1037. } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_WRN_IN_EXECUTION);
  1038. if (ret > 0)
  1039. ff_qsv_print_warning(avctx, ret, "Warning during encoding");
  1040. if (ret < 0) {
  1041. av_packet_unref(&new_pkt);
  1042. av_freep(&bs);
  1043. av_freep(&sync);
  1044. return (ret == MFX_ERR_MORE_DATA) ?
  1045. 0 : ff_qsv_print_error(avctx, ret, "Error during encoding");
  1046. }
  1047. if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame)
  1048. print_interlace_msg(avctx, q);
  1049. if (*sync) {
  1050. av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
  1051. av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
  1052. av_fifo_generic_write(q->async_fifo, &bs, sizeof(bs), NULL);
  1053. } else {
  1054. av_freep(&sync);
  1055. av_packet_unref(&new_pkt);
  1056. av_freep(&bs);
  1057. }
  1058. return 0;
  1059. }
/**
 * Encode one frame (or flush with frame == NULL) and, once enough work is
 * queued (or when flushing), synchronize on the oldest pending operation
 * and return its packet.
 *
 * @param pkt        output packet; if pkt->data is pre-set by the caller
 *                   the encoded data is copied into it, otherwise the
 *                   internally allocated packet is handed over
 * @param got_packet set to 1 when a packet was produced
 * @return 0 on success, a negative AVERROR code on failure
 */
int ff_qsv_encode(AVCodecContext *avctx, QSVEncContext *q,
                  AVPacket *pkt, const AVFrame *frame, int *got_packet)
{
    int ret;

    ret = encode_frame(avctx, q, frame);
    if (ret < 0)
        return ret;

    /* drain a result once async_depth operations are in flight, or
     * unconditionally when flushing and anything is still queued */
    if ((qsv_fifo_size(q->async_fifo) >= q->async_depth) ||
        (!frame && av_fifo_size(q->async_fifo))) {
        AVPacket new_pkt;
        mfxBitstream *bs;
        mfxSyncPoint *sync;

        /* must match the write order in encode_frame(): pkt, sync, bs */
        av_fifo_generic_read(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,    sizeof(sync),    NULL);
        av_fifo_generic_read(q->async_fifo, &bs,      sizeof(bs),      NULL);

        do {
            ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
        } while (ret == MFX_WRN_IN_EXECUTION);

        /* libmfx timestamps are in a fixed 90 kHz timebase */
        new_pkt.dts  = av_rescale_q(bs->DecodeTimeStamp, (AVRational){1, 90000}, avctx->time_base);
        new_pkt.pts  = av_rescale_q(bs->TimeStamp,       (AVRational){1, 90000}, avctx->time_base);
        new_pkt.size = bs->DataLength;

        if (bs->FrameType & MFX_FRAMETYPE_IDR ||
            bs->FrameType & MFX_FRAMETYPE_xIDR)
            new_pkt.flags |= AV_PKT_FLAG_KEY;

#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        if (bs->FrameType & MFX_FRAMETYPE_I || bs->FrameType & MFX_FRAMETYPE_xI)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
        else if (bs->FrameType & MFX_FRAMETYPE_P || bs->FrameType & MFX_FRAMETYPE_xP)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_P;
        else if (bs->FrameType & MFX_FRAMETYPE_B || bs->FrameType & MFX_FRAMETYPE_xB)
            avctx->coded_frame->pict_type = AV_PICTURE_TYPE_B;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        av_freep(&bs);
        av_freep(&sync);

        if (pkt->data) {
            /* caller supplied its own buffer: copy into it */
            if (pkt->size < new_pkt.size) {
                av_log(avctx, AV_LOG_ERROR, "Submitted buffer not large enough: %d < %d\n",
                       pkt->size, new_pkt.size);
                av_packet_unref(&new_pkt);
                return AVERROR(EINVAL);
            }

            memcpy(pkt->data, new_pkt.data, new_pkt.size);
            pkt->size = new_pkt.size;

            ret = av_packet_copy_props(pkt, &new_pkt);
            av_packet_unref(&new_pkt);
            if (ret < 0)
                return ret;
        } else
            *pkt = new_pkt;

        *got_packet = 1;
    }

    return 0;
}
  1115. int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q)
  1116. {
  1117. QSVFrame *cur;
  1118. if (q->session)
  1119. MFXVideoENCODE_Close(q->session);
  1120. if (q->internal_session)
  1121. MFXClose(q->internal_session);
  1122. q->session = NULL;
  1123. q->internal_session = NULL;
  1124. av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
  1125. av_buffer_unref(&q->frames_ctx.mids_buf);
  1126. cur = q->work_frames;
  1127. while (cur) {
  1128. q->work_frames = cur->next;
  1129. av_frame_free(&cur->frame);
  1130. av_freep(&cur);
  1131. cur = q->work_frames;
  1132. }
  1133. while (q->async_fifo && av_fifo_size(q->async_fifo)) {
  1134. AVPacket pkt;
  1135. mfxSyncPoint *sync;
  1136. mfxBitstream *bs;
  1137. av_fifo_generic_read(q->async_fifo, &pkt, sizeof(pkt), NULL);
  1138. av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
  1139. av_fifo_generic_read(q->async_fifo, &bs, sizeof(bs), NULL);
  1140. av_freep(&sync);
  1141. av_freep(&bs);
  1142. av_packet_unref(&pkt);
  1143. }
  1144. av_fifo_free(q->async_fifo);
  1145. q->async_fifo = NULL;
  1146. av_freep(&q->opaque_surfaces);
  1147. av_buffer_unref(&q->opaque_alloc_buf);
  1148. av_freep(&q->extparam);
  1149. return 0;
  1150. }