/*
 * Copyright (c) 2010, Google, Inc.
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AV1 encoder support via libaom
 */

#define AOM_DISABLE_CTRL_TYPECHECKS 1
#include <aom/aom_encoder.h>
#include <aom/aomcx.h>

#include "libavutil/base64.h"
#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avcodec.h"
#include "internal.h"
#include "profiles.h"

/*
 * Portion of struct aom_codec_cx_pkt from aom_encoder.h.
 * One encoded frame returned from the library.
 */
struct FrameListData {
    void *buf;              /**< compressed data buffer */
    size_t sz;              /**< length of compressed data */
    int64_t pts;            /**< time stamp to show frame
                                 (in timebase units) */
    unsigned long duration; /**< duration to show frame
                                 (in timebase units) */
    uint32_t flags;         /**< flags for this frame */
    struct FrameListData *next;
};

typedef struct AOMEncoderContext {
    AVClass *class;
    struct aom_codec_ctx encoder;
    struct aom_image rawimg;
    struct aom_fixed_buf twopass_stats;
    struct FrameListData *coded_frame_list;
    int cpu_used;
    int auto_alt_ref;
    int lag_in_frames;
    int error_resilient;
    int crf;
    int static_thresh;
    int drop_threshold;
    int noise_sensitivity;
} AOMContext;
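
/* Human-readable names for the libaom control IDs used below, indexed by
 * control ID; referenced only when logging in codecctl_int(). */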
static const char *const ctlidstr[] = {
    [AOME_SET_CPUUSED]                  = "AOME_SET_CPUUSED",
    [AOME_SET_CQ_LEVEL]                 = "AOME_SET_CQ_LEVEL",
    [AOME_SET_ENABLEAUTOALTREF]         = "AOME_SET_ENABLEAUTOALTREF",
    [AOME_SET_STATIC_THRESHOLD]         = "AOME_SET_STATIC_THRESHOLD",
    [AV1E_SET_COLOR_RANGE]              = "AV1E_SET_COLOR_RANGE",
    [AV1E_SET_COLOR_PRIMARIES]          = "AV1E_SET_COLOR_PRIMARIES",
    [AV1E_SET_MATRIX_COEFFICIENTS]      = "AV1E_SET_MATRIX_COEFFICIENTS",
    [AV1E_SET_TRANSFER_CHARACTERISTICS] = "AV1E_SET_TRANSFER_CHARACTERISTICS",
};

static av_cold void log_encoder_error(AVCodecContext *avctx, const char *desc)
{
    AOMContext *ctx    = avctx->priv_data;
    const char *error  = aom_codec_error(&ctx->encoder);
    const char *detail = aom_codec_error_detail(&ctx->encoder);

    av_log(avctx, AV_LOG_ERROR, "%s: %s\n", desc, error);
    if (detail)
        av_log(avctx, AV_LOG_ERROR, " Additional information: %s\n", detail);
}

static av_cold void dump_enc_cfg(AVCodecContext *avctx,
                                 const struct aom_codec_enc_cfg *cfg)
{
    int width = -30;
    int level = AV_LOG_DEBUG;

    av_log(avctx, level, "aom_codec_enc_cfg\n");
    av_log(avctx, level, "generic settings\n"
                         " %*s%u\n %*s%u\n %*s%u\n %*s%u\n %*s%u\n"
                         " %*s%u\n %*s%u\n"
                         " %*s{%u/%u}\n %*s%u\n %*s%d\n %*s%u\n",
           width, "g_usage:",           cfg->g_usage,
           width, "g_threads:",         cfg->g_threads,
           width, "g_profile:",         cfg->g_profile,
           width, "g_w:",               cfg->g_w,
           width, "g_h:",               cfg->g_h,
           width, "g_bit_depth:",       cfg->g_bit_depth,
           width, "g_input_bit_depth:", cfg->g_input_bit_depth,
           width, "g_timebase:",        cfg->g_timebase.num, cfg->g_timebase.den,
           width, "g_error_resilient:", cfg->g_error_resilient,
           width, "g_pass:",            cfg->g_pass,
           width, "g_lag_in_frames:",   cfg->g_lag_in_frames);
    av_log(avctx, level, "rate control settings\n"
                         " %*s%u\n %*s%d\n %*s%p(%"SIZE_SPECIFIER")\n %*s%u\n",
           width, "rc_dropframe_thresh:", cfg->rc_dropframe_thresh,
           width, "rc_end_usage:",        cfg->rc_end_usage,
           width, "rc_twopass_stats_in:", cfg->rc_twopass_stats_in.buf, cfg->rc_twopass_stats_in.sz,
           width, "rc_target_bitrate:",   cfg->rc_target_bitrate);
    av_log(avctx, level, "quantizer settings\n"
                         " %*s%u\n %*s%u\n",
           width, "rc_min_quantizer:", cfg->rc_min_quantizer,
           width, "rc_max_quantizer:", cfg->rc_max_quantizer);
    av_log(avctx, level, "bitrate tolerance\n"
                         " %*s%u\n %*s%u\n",
           width, "rc_undershoot_pct:", cfg->rc_undershoot_pct,
           width, "rc_overshoot_pct:",  cfg->rc_overshoot_pct);
    av_log(avctx, level, "decoder buffer model\n"
                         " %*s%u\n %*s%u\n %*s%u\n",
           width, "rc_buf_sz:",         cfg->rc_buf_sz,
           width, "rc_buf_initial_sz:", cfg->rc_buf_initial_sz,
           width, "rc_buf_optimal_sz:", cfg->rc_buf_optimal_sz);
    av_log(avctx, level, "2 pass rate control settings\n"
                         " %*s%u\n %*s%u\n %*s%u\n",
           width, "rc_2pass_vbr_bias_pct:",       cfg->rc_2pass_vbr_bias_pct,
           width, "rc_2pass_vbr_minsection_pct:", cfg->rc_2pass_vbr_minsection_pct,
           width, "rc_2pass_vbr_maxsection_pct:", cfg->rc_2pass_vbr_maxsection_pct);
    av_log(avctx, level, "keyframing settings\n"
                         " %*s%d\n %*s%u\n %*s%u\n",
           width, "kf_mode:",     cfg->kf_mode,
           width, "kf_min_dist:", cfg->kf_min_dist,
           width, "kf_max_dist:", cfg->kf_max_dist);
    av_log(avctx, level, "\n");
}
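
/* Append one encoded frame to the tail of the singly linked coded-frame
 * list; *list is the head pointer and is updated when the list is empty. */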
static void coded_frame_add(void *list, struct FrameListData *cx_frame)
{
    struct FrameListData **p = list;

    while (*p)
        p = &(*p)->next;
    *p = cx_frame;
    cx_frame->next = NULL;
}

static av_cold void free_coded_frame(struct FrameListData *cx_frame)
{
    av_freep(&cx_frame->buf);
    av_freep(&cx_frame);
}

static av_cold void free_frame_list(struct FrameListData *list)
{
    struct FrameListData *p = list;

    while (p) {
        list = list->next;
        free_coded_frame(p);
        p = list;
    }
}
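
/* Set an integer-valued libaom codec control, logging the control name and
 * value; failures are reported through log_encoder_error() and mapped to
 * AVERROR(EINVAL). */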
static av_cold int codecctl_int(AVCodecContext *avctx,
                                enum aome_enc_control_id id, int val)
{
    AOMContext *ctx = avctx->priv_data;
    char buf[80];
    int width = -30;
    int res;

    snprintf(buf, sizeof(buf), "%s:", ctlidstr[id]);
    av_log(avctx, AV_LOG_DEBUG, " %*s%d\n", width, buf, val);

    res = aom_codec_control(&ctx->encoder, id, val);
    if (res != AOM_CODEC_OK) {
        snprintf(buf, sizeof(buf), "Failed to set %s codec control",
                 ctlidstr[id]);
        log_encoder_error(avctx, buf);
        return AVERROR(EINVAL);
    }

    return 0;
}

static av_cold int aom_free(AVCodecContext *avctx)
{
    AOMContext *ctx = avctx->priv_data;

    aom_codec_destroy(&ctx->encoder);
    av_freep(&ctx->twopass_stats.buf);
    av_freep(&avctx->stats_out);
    free_frame_list(ctx->coded_frame_list);
    return 0;
}
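
/* Translate avctx->pix_fmt into the matching libaom image format, AV1
 * profile and bit depth; high-bit-depth formats are accepted only when the
 * linked libaom reports AOM_CODEC_CAP_HIGHBITDEPTH. */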
static int set_pix_fmt(AVCodecContext *avctx, aom_codec_caps_t codec_caps,
                       struct aom_codec_enc_cfg *enccfg, aom_codec_flags_t *flags,
                       aom_img_fmt_t *img_fmt)
{
    AOMContext av_unused *ctx = avctx->priv_data;

    enccfg->g_bit_depth = enccfg->g_input_bit_depth = 8;
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_YUV420P:
        enccfg->g_profile = FF_PROFILE_AV1_MAIN;
        *img_fmt = AOM_IMG_FMT_I420;
        return 0;
    case AV_PIX_FMT_YUV422P:
        enccfg->g_profile = FF_PROFILE_AV1_PROFESSIONAL;
        *img_fmt = AOM_IMG_FMT_I422;
        return 0;
    case AV_PIX_FMT_GBRP:
    case AV_PIX_FMT_YUV444P:
        enccfg->g_profile = FF_PROFILE_AV1_HIGH;
        *img_fmt = AOM_IMG_FMT_I444;
        return 0;
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        if (codec_caps & AOM_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV420P10 ? 10 : 12;
            enccfg->g_profile =
                enccfg->g_bit_depth == 10 ? FF_PROFILE_AV1_MAIN : FF_PROFILE_AV1_PROFESSIONAL;
            *img_fmt = AOM_IMG_FMT_I42016;
            *flags |= AOM_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV422P12:
        if (codec_caps & AOM_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV422P10 ? 10 : 12;
            enccfg->g_profile = FF_PROFILE_AV1_PROFESSIONAL;
            *img_fmt = AOM_IMG_FMT_I42216;
            *flags |= AOM_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        if (codec_caps & AOM_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV444P10 ||
                avctx->pix_fmt == AV_PIX_FMT_GBRP10 ? 10 : 12;
            enccfg->g_profile =
                enccfg->g_bit_depth == 10 ? FF_PROFILE_AV1_HIGH : FF_PROFILE_AV1_PROFESSIONAL;
            *img_fmt = AOM_IMG_FMT_I44416;
            *flags |= AOM_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    default:
        break;
    }
    av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n");
    return AVERROR_INVALIDDATA;
}

static void set_color_range(AVCodecContext *avctx)
{
    enum aom_color_range aom_cr;

    switch (avctx->color_range) {
    case AVCOL_RANGE_UNSPECIFIED:
    case AVCOL_RANGE_MPEG: aom_cr = AOM_CR_STUDIO_RANGE; break;
    case AVCOL_RANGE_JPEG: aom_cr = AOM_CR_FULL_RANGE;   break;
    default:
        av_log(avctx, AV_LOG_WARNING, "Unsupported color range (%d)\n",
               avctx->color_range);
        return;
    }

    codecctl_int(avctx, AV1E_SET_COLOR_RANGE, aom_cr);
}

static av_cold int aom_init(AVCodecContext *avctx,
                            const struct aom_codec_iface *iface)
{
    AOMContext *ctx = avctx->priv_data;
    struct aom_codec_enc_cfg enccfg = { 0 };
    aom_codec_flags_t flags = 0;
    AVCPBProperties *cpb_props;
    int res;
    aom_img_fmt_t img_fmt;
    aom_codec_caps_t codec_caps = aom_codec_get_caps(iface);

    av_log(avctx, AV_LOG_INFO, "%s\n", aom_codec_version_str());
    av_log(avctx, AV_LOG_VERBOSE, "%s\n", aom_codec_build_config());

    if ((res = aom_codec_enc_config_default(iface, &enccfg, 0)) != AOM_CODEC_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to get config: %s\n",
               aom_codec_err_to_string(res));
        return AVERROR(EINVAL);
    }

    if (set_pix_fmt(avctx, codec_caps, &enccfg, &flags, &img_fmt))
        return AVERROR(EINVAL);

    if (!avctx->bit_rate)
        if (avctx->rc_max_rate || avctx->rc_buffer_size || avctx->rc_initial_buffer_occupancy) {
            av_log(avctx, AV_LOG_ERROR,
                   "Rate control parameters set without a bitrate\n");
            return AVERROR(EINVAL);
        }

    dump_enc_cfg(avctx, &enccfg);

    enccfg.g_w            = avctx->width;
    enccfg.g_h            = avctx->height;
    enccfg.g_timebase.num = avctx->time_base.num;
    enccfg.g_timebase.den = avctx->time_base.den;
    enccfg.g_threads      = avctx->thread_count;

    if (ctx->lag_in_frames >= 0)
        enccfg.g_lag_in_frames = ctx->lag_in_frames;

    if (avctx->flags & AV_CODEC_FLAG_PASS1)
        enccfg.g_pass = AOM_RC_FIRST_PASS;
    else if (avctx->flags & AV_CODEC_FLAG_PASS2)
        enccfg.g_pass = AOM_RC_LAST_PASS;
    else
        enccfg.g_pass = AOM_RC_ONE_PASS;
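
    /* Pick the rate-control mode: matching min/max/target bitrates select
     * CBR; a non-negative crf selects constrained quality (CQ), or constant
     * quality (Q) when no bitrate is given; otherwise the default end usage
     * from aom_codec_enc_config_default() is kept. */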
    if (avctx->rc_min_rate == avctx->rc_max_rate &&
        avctx->rc_min_rate == avctx->bit_rate && avctx->bit_rate) {
        enccfg.rc_end_usage = AOM_CBR;
    } else if (ctx->crf >= 0) {
        enccfg.rc_end_usage = AOM_CQ;
        if (!avctx->bit_rate)
            enccfg.rc_end_usage = AOM_Q;
    }

    if (avctx->bit_rate) {
        enccfg.rc_target_bitrate = av_rescale_rnd(avctx->bit_rate, 1, 1000,
                                                  AV_ROUND_NEAR_INF);
    } else if (enccfg.rc_end_usage != AOM_Q) {
        if (enccfg.rc_end_usage == AOM_CQ) {
            enccfg.rc_target_bitrate = 1000000;
        } else {
            avctx->bit_rate = enccfg.rc_target_bitrate * 1000;
            av_log(avctx, AV_LOG_WARNING,
                   "Neither bitrate nor constrained quality specified, using default bitrate of %dkbit/sec\n",
                   enccfg.rc_target_bitrate);
        }
    }

    if (avctx->qmin >= 0)
        enccfg.rc_min_quantizer = avctx->qmin;
    if (avctx->qmax >= 0)
        enccfg.rc_max_quantizer = avctx->qmax;

    if (enccfg.rc_end_usage == AOM_CQ || enccfg.rc_end_usage == AOM_Q) {
        if (ctx->crf < enccfg.rc_min_quantizer || ctx->crf > enccfg.rc_max_quantizer) {
            av_log(avctx, AV_LOG_ERROR,
                   "CQ level %d must be between minimum and maximum quantizer value (%d-%d)\n",
                   ctx->crf, enccfg.rc_min_quantizer, enccfg.rc_max_quantizer);
            return AVERROR(EINVAL);
        }
    }

    enccfg.rc_dropframe_thresh = ctx->drop_threshold;

    // 0-100 (0 => CBR, 100 => VBR)
    enccfg.rc_2pass_vbr_bias_pct = round(avctx->qcompress * 100);
    if (avctx->bit_rate)
        enccfg.rc_2pass_vbr_minsection_pct =
            avctx->rc_min_rate * 100LL / avctx->bit_rate;
    if (avctx->rc_max_rate)
        enccfg.rc_2pass_vbr_maxsection_pct =
            avctx->rc_max_rate * 100LL / avctx->bit_rate;

    if (avctx->rc_buffer_size)
        enccfg.rc_buf_sz =
            avctx->rc_buffer_size * 1000LL / avctx->bit_rate;
    if (avctx->rc_initial_buffer_occupancy)
        enccfg.rc_buf_initial_sz =
            avctx->rc_initial_buffer_occupancy * 1000LL / avctx->bit_rate;
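
    /* libaom expresses the decoder buffer model in milliseconds of data at
     * the target bitrate (hence the * 1000 / bit_rate conversions above);
     * aim for an optimal fullness of 5/6 of the full buffer. */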
    enccfg.rc_buf_optimal_sz = enccfg.rc_buf_sz * 5 / 6;

    // _enc_init() will balk if kf_min_dist differs from max w/AOM_KF_AUTO
    if (avctx->keyint_min >= 0 && avctx->keyint_min == avctx->gop_size)
        enccfg.kf_min_dist = avctx->keyint_min;
    if (avctx->gop_size >= 0)
        enccfg.kf_max_dist = avctx->gop_size;

    if (enccfg.g_pass == AOM_RC_FIRST_PASS)
        enccfg.g_lag_in_frames = 0;
    else if (enccfg.g_pass == AOM_RC_LAST_PASS) {
        int decode_size, ret;

        if (!avctx->stats_in) {
            av_log(avctx, AV_LOG_ERROR, "No stats file for second pass\n");
            return AVERROR_INVALIDDATA;
        }
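
        /* The first-pass stats arrive base64-encoded in avctx->stats_in;
         * the decoded payload is at most 3/4 of the encoded length. */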
        ctx->twopass_stats.sz = strlen(avctx->stats_in) * 3 / 4;
        ret = av_reallocp(&ctx->twopass_stats.buf, ctx->twopass_stats.sz);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Stat buffer alloc (%"SIZE_SPECIFIER" bytes) failed\n",
                   ctx->twopass_stats.sz);
            ctx->twopass_stats.sz = 0;
            return ret;
        }
        decode_size = av_base64_decode(ctx->twopass_stats.buf, avctx->stats_in,
                                       ctx->twopass_stats.sz);
        if (decode_size < 0) {
            av_log(avctx, AV_LOG_ERROR, "Stat buffer decode failed\n");
            return AVERROR_INVALIDDATA;
        }

        ctx->twopass_stats.sz      = decode_size;
        enccfg.rc_twopass_stats_in = ctx->twopass_stats;
    }

    /* 0-3: For non-zero values the encoder increasingly optimizes for reduced
     * complexity playback on low powered devices at the expense of encode
     * quality. */
    if (avctx->profile != FF_PROFILE_UNKNOWN)
        enccfg.g_profile = avctx->profile;

    enccfg.g_error_resilient = ctx->error_resilient;

    dump_enc_cfg(avctx, &enccfg);
    /* Construct Encoder Context */
    res = aom_codec_enc_init(&ctx->encoder, iface, &enccfg, flags);
    if (res != AOM_CODEC_OK) {
        log_encoder_error(avctx, "Failed to initialize encoder");
        return AVERROR(EINVAL);
    }

    // codec control failures are currently treated only as warnings
    av_log(avctx, AV_LOG_DEBUG, "aom_codec_control\n");
    codecctl_int(avctx, AOME_SET_CPUUSED, ctx->cpu_used);
    if (ctx->auto_alt_ref >= 0)
        codecctl_int(avctx, AOME_SET_ENABLEAUTOALTREF, ctx->auto_alt_ref);
    codecctl_int(avctx, AOME_SET_STATIC_THRESHOLD, ctx->static_thresh);
    if (ctx->crf >= 0)
        codecctl_int(avctx, AOME_SET_CQ_LEVEL, ctx->crf);

    codecctl_int(avctx, AV1E_SET_COLOR_PRIMARIES, avctx->color_primaries);
    codecctl_int(avctx, AV1E_SET_MATRIX_COEFFICIENTS, avctx->colorspace);
    codecctl_int(avctx, AV1E_SET_TRANSFER_CHARACTERISTICS, avctx->color_trc);
    set_color_range(avctx);

    // provide dummy value to initialize wrapper, values will be updated each _encode()
    aom_img_wrap(&ctx->rawimg, img_fmt, avctx->width, avctx->height, 1,
                 (unsigned char *)1);

    if (codec_caps & AOM_CODEC_CAP_HIGHBITDEPTH)
        ctx->rawimg.bit_depth = enccfg.g_bit_depth;

    cpb_props = ff_add_cpb_side_data(avctx);
    if (!cpb_props)
        return AVERROR(ENOMEM);

    if (enccfg.rc_end_usage == AOM_CBR ||
        enccfg.g_pass != AOM_RC_ONE_PASS) {
        cpb_props->max_bitrate = avctx->rc_max_rate;
        cpb_props->min_bitrate = avctx->rc_min_rate;
        cpb_props->avg_bitrate = avctx->bit_rate;
    }
    cpb_props->buffer_size = avctx->rc_buffer_size;

    return 0;
}

static inline void cx_pktcpy(struct FrameListData *dst,
                             const struct aom_codec_cx_pkt *src)
{
    dst->pts      = src->data.frame.pts;
    dst->duration = src->data.frame.duration;
    dst->flags    = src->data.frame.flags;
    dst->sz       = src->data.frame.sz;
    dst->buf      = src->data.frame.buf;
}

/**
 * Store coded frame information in format suitable for return from encode2().
 *
 * Write information from @a cx_frame to @a pkt
 * @return packet data size on success
 * @return a negative AVERROR on error
 */
static int storeframe(AVCodecContext *avctx, struct FrameListData *cx_frame,
                      AVPacket *pkt)
{
    int ret = ff_alloc_packet2(avctx, pkt, cx_frame->sz, 0);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR,
               "Error getting output packet of size %"SIZE_SPECIFIER".\n", cx_frame->sz);
        return ret;
    }
    memcpy(pkt->data, cx_frame->buf, pkt->size);
    pkt->pts = pkt->dts = cx_frame->pts;

    if (!!(cx_frame->flags & AOM_FRAME_IS_KEY))
        pkt->flags |= AV_PKT_FLAG_KEY;
    return pkt->size;
}

/**
 * Queue multiple output frames from the encoder, returning the front-most.
 * In cases where aom_codec_get_cx_data() returns more than 1 frame, append
 * to the frame queue. Return the head frame if available.
 * @return Stored frame size
 * @return AVERROR(EINVAL) on output size error
 * @return AVERROR(ENOMEM) on coded frame queue data allocation error
 */
static int queue_frames(AVCodecContext *avctx, AVPacket *pkt_out)
{
    AOMContext *ctx = avctx->priv_data;
    const struct aom_codec_cx_pkt *pkt;
    const void *iter = NULL;
    int size = 0;

    if (ctx->coded_frame_list) {
        struct FrameListData *cx_frame = ctx->coded_frame_list;
        /* return the leading frame if we've already begun queueing */
        size = storeframe(avctx, cx_frame, pkt_out);
        if (size < 0)
            return size;
        ctx->coded_frame_list = cx_frame->next;
        free_coded_frame(cx_frame);
    }

    /* consume all available output from the encoder before returning. buffers
     * are only good through the next aom_codec call */
    while ((pkt = aom_codec_get_cx_data(&ctx->encoder, &iter))) {
        switch (pkt->kind) {
        case AOM_CODEC_CX_FRAME_PKT:
            if (!size) {
                struct FrameListData cx_frame;

                /* avoid storing the frame when the list is empty and we haven't yet
                 * provided a frame for output */
                assert(!ctx->coded_frame_list);
                cx_pktcpy(&cx_frame, pkt);
                size = storeframe(avctx, &cx_frame, pkt_out);
                if (size < 0)
                    return size;
            } else {
                struct FrameListData *cx_frame =
                    av_malloc(sizeof(struct FrameListData));

                if (!cx_frame) {
                    av_log(avctx, AV_LOG_ERROR,
                           "Frame queue element alloc failed\n");
                    return AVERROR(ENOMEM);
                }
                cx_pktcpy(cx_frame, pkt);
                cx_frame->buf = av_malloc(cx_frame->sz);

                if (!cx_frame->buf) {
                    av_log(avctx, AV_LOG_ERROR,
                           "Data buffer alloc (%"SIZE_SPECIFIER" bytes) failed\n",
                           cx_frame->sz);
                    av_freep(&cx_frame);
                    return AVERROR(ENOMEM);
                }
                memcpy(cx_frame->buf, pkt->data.frame.buf, pkt->data.frame.sz);
                coded_frame_add(&ctx->coded_frame_list, cx_frame);
            }
            break;
        case AOM_CODEC_STATS_PKT:
        {
            struct aom_fixed_buf *stats = &ctx->twopass_stats;
            int err;
            if ((err = av_reallocp(&stats->buf,
                                   stats->sz +
                                   pkt->data.twopass_stats.sz)) < 0) {
                stats->sz = 0;
                av_log(avctx, AV_LOG_ERROR, "Stat buffer realloc failed\n");
                return err;
            }
            memcpy((uint8_t *)stats->buf + stats->sz,
                   pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);
            stats->sz += pkt->data.twopass_stats.sz;
            break;
        }
        case AOM_CODEC_PSNR_PKT: // FIXME add support for AV_CODEC_FLAG_PSNR
        case AOM_CODEC_CUSTOM_PKT:
            // ignore unsupported/unrecognized packet types
            break;
        }
    }

    return size;
}

static int aom_encode(AVCodecContext *avctx, AVPacket *pkt,
                      const AVFrame *frame, int *got_packet)
{
    AOMContext *ctx = avctx->priv_data;
    struct aom_image *rawimg = NULL;
    int64_t timestamp = 0;
    int res, coded_size;
    aom_enc_frame_flags_t flags = 0;

    if (frame) {
        rawimg                      = &ctx->rawimg;
        rawimg->planes[AOM_PLANE_Y] = frame->data[0];
        rawimg->planes[AOM_PLANE_U] = frame->data[1];
        rawimg->planes[AOM_PLANE_V] = frame->data[2];
        rawimg->stride[AOM_PLANE_Y] = frame->linesize[0];
        rawimg->stride[AOM_PLANE_U] = frame->linesize[1];
        rawimg->stride[AOM_PLANE_V] = frame->linesize[2];
        timestamp                   = frame->pts;
        switch (frame->color_range) {
        case AVCOL_RANGE_MPEG:
            rawimg->range = AOM_CR_STUDIO_RANGE;
            break;
        case AVCOL_RANGE_JPEG:
            rawimg->range = AOM_CR_FULL_RANGE;
            break;
        }

        if (frame->pict_type == AV_PICTURE_TYPE_I)
            flags |= AOM_EFLAG_FORCE_KF;
    }

    res = aom_codec_encode(&ctx->encoder, rawimg, timestamp,
                           avctx->ticks_per_frame, flags);
    if (res != AOM_CODEC_OK) {
        log_encoder_error(avctx, "Error encoding frame");
        return AVERROR_INVALIDDATA;
    }

    coded_size = queue_frames(avctx, pkt);
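
    /* At the end of the first pass (flushing with frame == NULL), hand the
     * accumulated two-pass stats back to the caller base64-encoded in
     * avctx->stats_out. */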
    if (!frame && avctx->flags & AV_CODEC_FLAG_PASS1) {
        size_t b64_size = AV_BASE64_SIZE(ctx->twopass_stats.sz);

        avctx->stats_out = av_malloc(b64_size);
        if (!avctx->stats_out) {
            av_log(avctx, AV_LOG_ERROR, "Stat buffer alloc (%"SIZE_SPECIFIER" bytes) failed\n",
                   b64_size);
            return AVERROR(ENOMEM);
        }
        av_base64_encode(avctx->stats_out, b64_size, ctx->twopass_stats.buf,
                         ctx->twopass_stats.sz);
    }

    *got_packet = !!coded_size;
    return 0;
}

static const enum AVPixelFormat av1_pix_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_GBRP,
    AV_PIX_FMT_NONE
};

static const enum AVPixelFormat av1_pix_fmts_highbd[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUV420P10,
    AV_PIX_FMT_YUV422P10,
    AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV422P12,
    AV_PIX_FMT_YUV444P12,
    AV_PIX_FMT_GBRP,
    AV_PIX_FMT_GBRP10,
    AV_PIX_FMT_GBRP12,
    AV_PIX_FMT_NONE
};
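
/* Advertise the high-bit-depth pixel format list only when the linked
 * libaom build reports AOM_CODEC_CAP_HIGHBITDEPTH. */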
static av_cold void av1_init_static(AVCodec *codec)
{
    aom_codec_caps_t codec_caps = aom_codec_get_caps(aom_codec_av1_cx());
    if (codec_caps & AOM_CODEC_CAP_HIGHBITDEPTH)
        codec->pix_fmts = av1_pix_fmts_highbd;
    else
        codec->pix_fmts = av1_pix_fmts;
}

static av_cold int av1_init(AVCodecContext *avctx)
{
    return aom_init(avctx, aom_codec_av1_cx());
}

#define OFFSET(x) offsetof(AOMContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    { "cpu-used", "Quality/Speed ratio modifier", OFFSET(cpu_used), AV_OPT_TYPE_INT, {.i64 = 1}, -8, 8, VE},
    { "auto-alt-ref", "Enable use of alternate reference "
      "frames (2-pass only)", OFFSET(auto_alt_ref), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 2, VE},
    { "lag-in-frames", "Number of frames to look ahead at for "
      "alternate reference frame selection", OFFSET(lag_in_frames), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE},
    { "error-resilience", "Error resilience configuration", OFFSET(error_resilient), AV_OPT_TYPE_FLAGS, {.i64 = 0}, INT_MIN, INT_MAX, VE, "er"},
    { "default", "Improve resiliency against losses of whole frames", 0, AV_OPT_TYPE_CONST, {.i64 = AOM_ERROR_RESILIENT_DEFAULT}, 0, 0, VE, "er"},
    { "partitions", "The frame partitions are independently decodable "
      "by the bool decoder, meaning that partitions can be decoded even "
      "though earlier partitions have been lost. Note that intra prediction"
      " is still done over the partition boundary.", 0, AV_OPT_TYPE_CONST, {.i64 = AOM_ERROR_RESILIENT_PARTITIONS}, 0, 0, VE, "er"},
    { "crf", "Select the quality for constant quality mode", offsetof(AOMContext, crf), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 63, VE },
    { "static-thresh", "A change threshold on blocks below which they will be skipped by the encoder", OFFSET(static_thresh), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VE },
    { "drop-threshold", "Frame drop threshold", offsetof(AOMContext, drop_threshold), AV_OPT_TYPE_INT, {.i64 = 0 }, INT_MIN, INT_MAX, VE },
    { "noise-sensitivity", "Noise sensitivity", OFFSET(noise_sensitivity), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 4, VE},
    { NULL }
};

static const AVCodecDefault defaults[] = {
    { "qmin",       "-1" },
    { "qmax",       "-1" },
    { "g",          "-1" },
    { "keyint_min", "-1" },
    { NULL },
};

static const AVClass class_aom = {
    .class_name = "libaom-av1 encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_libaom_av1_encoder = {
    .name             = "libaom-av1",
    .long_name        = NULL_IF_CONFIG_SMALL("libaom AV1"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_AV1,
    .priv_data_size   = sizeof(AOMContext),
    .init             = av1_init,
    .encode2          = aom_encode,
    .close            = aom_free,
    .capabilities     = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS | AV_CODEC_CAP_EXPERIMENTAL,
    .profiles         = NULL_IF_CONFIG_SMALL(ff_av1_profiles),
    .priv_class       = &class_aom,
    .defaults         = defaults,
    .init_static_data = av1_init_static,
    .wrapper_name     = "libaom",
};