/*
 * Copyright (c) 2010, Google, Inc.
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * VP8/9 encoder support via libvpx
 */

#define VPX_DISABLE_CTRL_TYPECHECKS 1
#define VPX_CODEC_DISABLE_COMPAT 1
#include <vpx/vpx_encoder.h>
#include <vpx/vp8cx.h>

#include "avcodec.h"
#include "internal.h"
#include "libavutil/avassert.h"
#include "libvpx.h"
#include "profiles.h"
#include "libavutil/avstring.h"
#include "libavutil/base64.h"
#include "libavutil/common.h"
#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"

/**
 * Portion of struct vpx_codec_cx_pkt from vpx_encoder.h.
 * One encoded frame returned from the library.
 */
struct FrameListData {
    void *buf;              /**< compressed data buffer */
    size_t sz;              /**< length of compressed data */
    void *buf_alpha;
    size_t sz_alpha;
    int64_t pts;            /**< time stamp to show frame
                                 (in timebase units) */
    unsigned long duration; /**< duration to show frame
                                 (in timebase units) */
    uint32_t flags;         /**< flags for this frame */
    uint64_t sse[4];
    int have_sse;           /**< true if we have pending sse[] */
    uint64_t frame_number;
    struct FrameListData *next;
};
typedef struct VPxEncoderContext {
    AVClass *class;
    struct vpx_codec_ctx encoder;
    struct vpx_image rawimg;
    struct vpx_codec_ctx encoder_alpha;
    struct vpx_image rawimg_alpha;
    uint8_t is_alpha;
    struct vpx_fixed_buf twopass_stats;
    int deadline; //i.e., RT/GOOD/BEST
    uint64_t sse[4];
    int have_sse; /**< true if we have pending sse[] */
    uint64_t frame_number;
    struct FrameListData *coded_frame_list;

    int cpu_used;
    int sharpness;
    /**
     * VP8 specific flags, see VP8F_* below.
     */
    int flags;
#define VP8F_ERROR_RESILIENT 0x00000001 ///< Enable measures appropriate for streaming over lossy links
#define VP8F_AUTO_ALT_REF    0x00000002 ///< Enable automatic alternate reference frame generation

    int auto_alt_ref;

    int arnr_max_frames;
    int arnr_strength;
    int arnr_type;

    int tune;

    int lag_in_frames;
    int error_resilient;
    int crf;
    int static_thresh;
    int max_intra_rate;
    int rc_undershoot_pct;
    int rc_overshoot_pct;

    char *vp8_ts_parameters;

    // VP9-only
    int lossless;
    int tile_columns;
    int tile_rows;
    int frame_parallel;
    int aq_mode;
    int drop_threshold;
    int noise_sensitivity;
    int vpx_cs;
    float level;
    int row_mt;
    int tune_content;
    int corpus_complexity;
    int tpl_model;
    /**
     * If the driver does not support ROI then warn the first time we
     * encounter a frame with ROI side data.
     */
    int roi_warned;
} VPxContext;
/** String mappings for enum vp8e_enc_control_id */
static const char *const ctlidstr[] = {
    [VP8E_SET_CPUUSED] = "VP8E_SET_CPUUSED",
    [VP8E_SET_ENABLEAUTOALTREF] = "VP8E_SET_ENABLEAUTOALTREF",
    [VP8E_SET_NOISE_SENSITIVITY] = "VP8E_SET_NOISE_SENSITIVITY",
    [VP8E_SET_STATIC_THRESHOLD] = "VP8E_SET_STATIC_THRESHOLD",
    [VP8E_SET_TOKEN_PARTITIONS] = "VP8E_SET_TOKEN_PARTITIONS",
    [VP8E_SET_ARNR_MAXFRAMES] = "VP8E_SET_ARNR_MAXFRAMES",
    [VP8E_SET_ARNR_STRENGTH] = "VP8E_SET_ARNR_STRENGTH",
    [VP8E_SET_ARNR_TYPE] = "VP8E_SET_ARNR_TYPE",
    [VP8E_SET_TUNING] = "VP8E_SET_TUNING",
    [VP8E_SET_CQ_LEVEL] = "VP8E_SET_CQ_LEVEL",
    [VP8E_SET_MAX_INTRA_BITRATE_PCT] = "VP8E_SET_MAX_INTRA_BITRATE_PCT",
    [VP8E_SET_SHARPNESS] = "VP8E_SET_SHARPNESS",
#if CONFIG_LIBVPX_VP9_ENCODER
    [VP9E_SET_LOSSLESS] = "VP9E_SET_LOSSLESS",
    [VP9E_SET_TILE_COLUMNS] = "VP9E_SET_TILE_COLUMNS",
    [VP9E_SET_TILE_ROWS] = "VP9E_SET_TILE_ROWS",
    [VP9E_SET_FRAME_PARALLEL_DECODING] = "VP9E_SET_FRAME_PARALLEL_DECODING",
    [VP9E_SET_AQ_MODE] = "VP9E_SET_AQ_MODE",
    [VP9E_SET_COLOR_SPACE] = "VP9E_SET_COLOR_SPACE",
#if VPX_ENCODER_ABI_VERSION >= 11
    [VP9E_SET_COLOR_RANGE] = "VP9E_SET_COLOR_RANGE",
#endif
#if VPX_ENCODER_ABI_VERSION >= 12
    [VP9E_SET_TARGET_LEVEL] = "VP9E_SET_TARGET_LEVEL",
    [VP9E_GET_LEVEL] = "VP9E_GET_LEVEL",
#endif
#ifdef VPX_CTRL_VP9E_SET_ROW_MT
    [VP9E_SET_ROW_MT] = "VP9E_SET_ROW_MT",
#endif
#ifdef VPX_CTRL_VP9E_SET_TUNE_CONTENT
    [VP9E_SET_TUNE_CONTENT] = "VP9E_SET_TUNE_CONTENT",
#endif
#ifdef VPX_CTRL_VP9E_SET_TPL
    [VP9E_SET_TPL] = "VP9E_SET_TPL",
#endif
#endif
};
static av_cold void log_encoder_error(AVCodecContext *avctx, const char *desc)
{
    VPxContext *ctx = avctx->priv_data;
    const char *error = vpx_codec_error(&ctx->encoder);
    const char *detail = vpx_codec_error_detail(&ctx->encoder);

    av_log(avctx, AV_LOG_ERROR, "%s: %s\n", desc, error);
    if (detail)
        av_log(avctx, AV_LOG_ERROR, " Additional information: %s\n", detail);
}
static av_cold void dump_enc_cfg(AVCodecContext *avctx,
                                 const struct vpx_codec_enc_cfg *cfg)
{
    int width = -30;
    int level = AV_LOG_DEBUG;
    int i;

    av_log(avctx, level, "vpx_codec_enc_cfg\n");
    av_log(avctx, level, "generic settings\n"
           " %*s%u\n %*s%u\n %*s%u\n %*s%u\n %*s%u\n"
#if CONFIG_LIBVPX_VP9_ENCODER
           " %*s%u\n %*s%u\n"
#endif
           " %*s{%u/%u}\n %*s%u\n %*s%d\n %*s%u\n",
           width, "g_usage:", cfg->g_usage,
           width, "g_threads:", cfg->g_threads,
           width, "g_profile:", cfg->g_profile,
           width, "g_w:", cfg->g_w,
           width, "g_h:", cfg->g_h,
#if CONFIG_LIBVPX_VP9_ENCODER
           width, "g_bit_depth:", cfg->g_bit_depth,
           width, "g_input_bit_depth:", cfg->g_input_bit_depth,
#endif
           width, "g_timebase:", cfg->g_timebase.num, cfg->g_timebase.den,
           width, "g_error_resilient:", cfg->g_error_resilient,
           width, "g_pass:", cfg->g_pass,
           width, "g_lag_in_frames:", cfg->g_lag_in_frames);
    av_log(avctx, level, "rate control settings\n"
           " %*s%u\n %*s%u\n %*s%u\n %*s%u\n"
           " %*s%d\n %*s%p(%"SIZE_SPECIFIER")\n %*s%u\n",
           width, "rc_dropframe_thresh:", cfg->rc_dropframe_thresh,
           width, "rc_resize_allowed:", cfg->rc_resize_allowed,
           width, "rc_resize_up_thresh:", cfg->rc_resize_up_thresh,
           width, "rc_resize_down_thresh:", cfg->rc_resize_down_thresh,
           width, "rc_end_usage:", cfg->rc_end_usage,
           width, "rc_twopass_stats_in:", cfg->rc_twopass_stats_in.buf, cfg->rc_twopass_stats_in.sz,
           width, "rc_target_bitrate:", cfg->rc_target_bitrate);
    av_log(avctx, level, "quantizer settings\n"
           " %*s%u\n %*s%u\n",
           width, "rc_min_quantizer:", cfg->rc_min_quantizer,
           width, "rc_max_quantizer:", cfg->rc_max_quantizer);
    av_log(avctx, level, "bitrate tolerance\n"
           " %*s%u\n %*s%u\n",
           width, "rc_undershoot_pct:", cfg->rc_undershoot_pct,
           width, "rc_overshoot_pct:", cfg->rc_overshoot_pct);
    av_log(avctx, level, "temporal layering settings\n"
           " %*s%u\n", width, "ts_number_layers:", cfg->ts_number_layers);
    av_log(avctx, level,
           "\n %*s", width, "ts_target_bitrate:");
    for (i = 0; i < VPX_TS_MAX_LAYERS; i++)
        av_log(avctx, level, "%u ", cfg->ts_target_bitrate[i]);
    av_log(avctx, level, "\n");
    av_log(avctx, level,
           "\n %*s", width, "ts_rate_decimator:");
    for (i = 0; i < VPX_TS_MAX_LAYERS; i++)
        av_log(avctx, level, "%u ", cfg->ts_rate_decimator[i]);
    av_log(avctx, level, "\n");
    av_log(avctx, level,
           "\n %*s%u\n", width, "ts_periodicity:", cfg->ts_periodicity);
    av_log(avctx, level,
           "\n %*s", width, "ts_layer_id:");
    for (i = 0; i < VPX_TS_MAX_PERIODICITY; i++)
        av_log(avctx, level, "%u ", cfg->ts_layer_id[i]);
    av_log(avctx, level, "\n");
    av_log(avctx, level, "decoder buffer model\n"
           " %*s%u\n %*s%u\n %*s%u\n",
           width, "rc_buf_sz:", cfg->rc_buf_sz,
           width, "rc_buf_initial_sz:", cfg->rc_buf_initial_sz,
           width, "rc_buf_optimal_sz:", cfg->rc_buf_optimal_sz);
    av_log(avctx, level, "2 pass rate control settings\n"
           " %*s%u\n %*s%u\n %*s%u\n",
           width, "rc_2pass_vbr_bias_pct:", cfg->rc_2pass_vbr_bias_pct,
           width, "rc_2pass_vbr_minsection_pct:", cfg->rc_2pass_vbr_minsection_pct,
           width, "rc_2pass_vbr_maxsection_pct:", cfg->rc_2pass_vbr_maxsection_pct);
#if VPX_ENCODER_ABI_VERSION >= 14
    av_log(avctx, level, " %*s%u\n",
           width, "rc_2pass_vbr_corpus_complexity:", cfg->rc_2pass_vbr_corpus_complexity);
#endif
    av_log(avctx, level, "keyframing settings\n"
           " %*s%d\n %*s%u\n %*s%u\n",
           width, "kf_mode:", cfg->kf_mode,
           width, "kf_min_dist:", cfg->kf_min_dist,
           width, "kf_max_dist:", cfg->kf_max_dist);
    av_log(avctx, level, "\n");
}
static void coded_frame_add(void *list, struct FrameListData *cx_frame)
{
    struct FrameListData **p = list;

    while (*p)
        p = &(*p)->next;
    *p = cx_frame;
    cx_frame->next = NULL;
}

static av_cold void free_coded_frame(struct FrameListData *cx_frame)
{
    av_freep(&cx_frame->buf);
    if (cx_frame->buf_alpha)
        av_freep(&cx_frame->buf_alpha);
    av_freep(&cx_frame);
}

static av_cold void free_frame_list(struct FrameListData *list)
{
    struct FrameListData *p = list;

    while (p) {
        list = list->next;
        free_coded_frame(p);
        p = list;
    }
}
static av_cold int codecctl_int(AVCodecContext *avctx,
                                enum vp8e_enc_control_id id, int val)
{
    VPxContext *ctx = avctx->priv_data;
    char buf[80];
    int width = -30;
    int res;

    snprintf(buf, sizeof(buf), "%s:", ctlidstr[id]);
    av_log(avctx, AV_LOG_DEBUG, " %*s%d\n", width, buf, val);

    res = vpx_codec_control(&ctx->encoder, id, val);
    if (res != VPX_CODEC_OK) {
        snprintf(buf, sizeof(buf), "Failed to set %s codec control",
                 ctlidstr[id]);
        log_encoder_error(avctx, buf);
    }

    return res == VPX_CODEC_OK ? 0 : AVERROR(EINVAL);
}

#if VPX_ENCODER_ABI_VERSION >= 12
static av_cold int codecctl_intp(AVCodecContext *avctx,
                                 enum vp8e_enc_control_id id, int *val)
{
    VPxContext *ctx = avctx->priv_data;
    char buf[80];
    int width = -30;
    int res;

    snprintf(buf, sizeof(buf), "%s:", ctlidstr[id]);
    av_log(avctx, AV_LOG_DEBUG, " %*s%d\n", width, buf, *val);

    res = vpx_codec_control(&ctx->encoder, id, val);
    if (res != VPX_CODEC_OK) {
        snprintf(buf, sizeof(buf), "Failed to set %s codec control",
                 ctlidstr[id]);
        log_encoder_error(avctx, buf);
    }

    return res == VPX_CODEC_OK ? 0 : AVERROR(EINVAL);
}
#endif
static av_cold int vpx_free(AVCodecContext *avctx)
{
    VPxContext *ctx = avctx->priv_data;

#if VPX_ENCODER_ABI_VERSION >= 12
    if (avctx->codec_id == AV_CODEC_ID_VP9 && ctx->level >= 0 &&
        !(avctx->flags & AV_CODEC_FLAG_PASS1)) {
        int level_out = 0;
        if (!codecctl_intp(avctx, VP9E_GET_LEVEL, &level_out))
            av_log(avctx, AV_LOG_INFO, "Encoded level %.1f\n", level_out * 0.1);
    }
#endif

    vpx_codec_destroy(&ctx->encoder);
    if (ctx->is_alpha)
        vpx_codec_destroy(&ctx->encoder_alpha);
    av_freep(&ctx->twopass_stats.buf);
    av_freep(&avctx->stats_out);
    free_frame_list(ctx->coded_frame_list);
    return 0;
}
static void vp8_ts_parse_int_array(int *dest, char *value, size_t value_len, int max_entries)
{
    int dest_idx = 0;
    char *saveptr = NULL;
    char *token = av_strtok(value, ",", &saveptr);

    while (token && dest_idx < max_entries) {
        dest[dest_idx++] = strtoul(token, NULL, 10);
        token = av_strtok(NULL, ",", &saveptr);
    }
}
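/* Apply one key=value pair from the vp8 "ts-parameters" option string to the
 * temporal layering fields of the encoder configuration. */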
static int vp8_ts_param_parse(struct vpx_codec_enc_cfg *enccfg, char *key, char *value)
{
    size_t value_len = strlen(value);

    if (!value_len)
        return -1;

    if (!strcmp(key, "ts_number_layers"))
        enccfg->ts_number_layers = strtoul(value, &value, 10);
    else if (!strcmp(key, "ts_target_bitrate"))
        vp8_ts_parse_int_array(enccfg->ts_target_bitrate, value, value_len, VPX_TS_MAX_LAYERS);
    else if (!strcmp(key, "ts_rate_decimator"))
        vp8_ts_parse_int_array(enccfg->ts_rate_decimator, value, value_len, VPX_TS_MAX_LAYERS);
    else if (!strcmp(key, "ts_periodicity"))
        enccfg->ts_periodicity = strtoul(value, &value, 10);
    else if (!strcmp(key, "ts_layer_id"))
        vp8_ts_parse_int_array(enccfg->ts_layer_id, value, value_len, VPX_TS_MAX_PERIODICITY);

    return 0;
}
#if CONFIG_LIBVPX_VP9_ENCODER
static int set_pix_fmt(AVCodecContext *avctx, vpx_codec_caps_t codec_caps,
                       struct vpx_codec_enc_cfg *enccfg, vpx_codec_flags_t *flags,
                       vpx_img_fmt_t *img_fmt)
{
    VPxContext av_unused *ctx = avctx->priv_data;
    enccfg->g_bit_depth = enccfg->g_input_bit_depth = 8;
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVA420P:
        enccfg->g_profile = 0;
        *img_fmt = VPX_IMG_FMT_I420;
        return 0;
    case AV_PIX_FMT_YUV422P:
        enccfg->g_profile = 1;
        *img_fmt = VPX_IMG_FMT_I422;
        return 0;
    case AV_PIX_FMT_YUV440P:
        enccfg->g_profile = 1;
        *img_fmt = VPX_IMG_FMT_I440;
        return 0;
    case AV_PIX_FMT_GBRP:
        ctx->vpx_cs = VPX_CS_SRGB;
    case AV_PIX_FMT_YUV444P:
        enccfg->g_profile = 1;
        *img_fmt = VPX_IMG_FMT_I444;
        return 0;
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        if (codec_caps & VPX_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV420P10 ? 10 : 12;
            enccfg->g_profile = 2;
            *img_fmt = VPX_IMG_FMT_I42016;
            *flags |= VPX_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV422P12:
        if (codec_caps & VPX_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV422P10 ? 10 : 12;
            enccfg->g_profile = 3;
            *img_fmt = VPX_IMG_FMT_I42216;
            *flags |= VPX_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    case AV_PIX_FMT_YUV440P10:
    case AV_PIX_FMT_YUV440P12:
        if (codec_caps & VPX_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV440P10 ? 10 : 12;
            enccfg->g_profile = 3;
            *img_fmt = VPX_IMG_FMT_I44016;
            *flags |= VPX_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
        ctx->vpx_cs = VPX_CS_SRGB;
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        if (codec_caps & VPX_CODEC_CAP_HIGHBITDEPTH) {
            enccfg->g_bit_depth = enccfg->g_input_bit_depth =
                avctx->pix_fmt == AV_PIX_FMT_YUV444P10 ||
                avctx->pix_fmt == AV_PIX_FMT_GBRP10 ? 10 : 12;
            enccfg->g_profile = 3;
            *img_fmt = VPX_IMG_FMT_I44416;
            *flags |= VPX_CODEC_USE_HIGHBITDEPTH;
            return 0;
        }
        break;
    default:
        break;
    }
    av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n");
    return AVERROR_INVALIDDATA;
}
static void set_colorspace(AVCodecContext *avctx)
{
    enum vpx_color_space vpx_cs;
    VPxContext *ctx = avctx->priv_data;

    if (ctx->vpx_cs) {
        vpx_cs = ctx->vpx_cs;
    } else {
        switch (avctx->colorspace) {
        case AVCOL_SPC_RGB:         vpx_cs = VPX_CS_SRGB;      break;
        case AVCOL_SPC_BT709:       vpx_cs = VPX_CS_BT_709;    break;
        case AVCOL_SPC_UNSPECIFIED: vpx_cs = VPX_CS_UNKNOWN;   break;
        case AVCOL_SPC_RESERVED:    vpx_cs = VPX_CS_RESERVED;  break;
        case AVCOL_SPC_BT470BG:     vpx_cs = VPX_CS_BT_601;    break;
        case AVCOL_SPC_SMPTE170M:   vpx_cs = VPX_CS_SMPTE_170; break;
        case AVCOL_SPC_SMPTE240M:   vpx_cs = VPX_CS_SMPTE_240; break;
        case AVCOL_SPC_BT2020_NCL:  vpx_cs = VPX_CS_BT_2020;   break;
        default:
            av_log(avctx, AV_LOG_WARNING, "Unsupported colorspace (%d)\n",
                   avctx->colorspace);
            return;
        }
    }
    codecctl_int(avctx, VP9E_SET_COLOR_SPACE, vpx_cs);
}

#if VPX_ENCODER_ABI_VERSION >= 11
static void set_color_range(AVCodecContext *avctx)
{
    enum vpx_color_range vpx_cr;
    switch (avctx->color_range) {
    case AVCOL_RANGE_UNSPECIFIED:
    case AVCOL_RANGE_MPEG: vpx_cr = VPX_CR_STUDIO_RANGE; break;
    case AVCOL_RANGE_JPEG: vpx_cr = VPX_CR_FULL_RANGE;   break;
    default:
        av_log(avctx, AV_LOG_WARNING, "Unsupported color range (%d)\n",
               avctx->color_range);
        return;
    }
    codecctl_int(avctx, VP9E_SET_COLOR_RANGE, vpx_cr);
}
#endif
#endif
static av_cold int vpx_init(AVCodecContext *avctx,
                            const struct vpx_codec_iface *iface)
{
    VPxContext *ctx = avctx->priv_data;
    struct vpx_codec_enc_cfg enccfg = { 0 };
    struct vpx_codec_enc_cfg enccfg_alpha;
    vpx_codec_flags_t flags = (avctx->flags & AV_CODEC_FLAG_PSNR) ? VPX_CODEC_USE_PSNR : 0;
    AVCPBProperties *cpb_props;
    int res;
    vpx_img_fmt_t img_fmt = VPX_IMG_FMT_I420;
#if CONFIG_LIBVPX_VP9_ENCODER
    vpx_codec_caps_t codec_caps = vpx_codec_get_caps(iface);
#endif

    av_log(avctx, AV_LOG_INFO, "%s\n", vpx_codec_version_str());
    av_log(avctx, AV_LOG_VERBOSE, "%s\n", vpx_codec_build_config());

    if (avctx->pix_fmt == AV_PIX_FMT_YUVA420P)
        ctx->is_alpha = 1;

    if ((res = vpx_codec_enc_config_default(iface, &enccfg, 0)) != VPX_CODEC_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to get config: %s\n",
               vpx_codec_err_to_string(res));
        return AVERROR(EINVAL);
    }

#if CONFIG_LIBVPX_VP9_ENCODER
    if (avctx->codec_id == AV_CODEC_ID_VP9) {
        if (set_pix_fmt(avctx, codec_caps, &enccfg, &flags, &img_fmt))
            return AVERROR(EINVAL);
    }
#endif

    if (!avctx->bit_rate)
        if (avctx->rc_max_rate || avctx->rc_buffer_size || avctx->rc_initial_buffer_occupancy) {
            av_log(avctx, AV_LOG_ERROR, "Rate control parameters set without a bitrate\n");
            return AVERROR(EINVAL);
        }

    dump_enc_cfg(avctx, &enccfg);

    enccfg.g_w = avctx->width;
    enccfg.g_h = avctx->height;
    enccfg.g_timebase.num = avctx->time_base.num;
    enccfg.g_timebase.den = avctx->time_base.den;
    enccfg.g_threads =
        FFMIN(avctx->thread_count ? avctx->thread_count : av_cpu_count(), 16);
    enccfg.g_lag_in_frames = ctx->lag_in_frames;

    if (avctx->flags & AV_CODEC_FLAG_PASS1)
        enccfg.g_pass = VPX_RC_FIRST_PASS;
    else if (avctx->flags & AV_CODEC_FLAG_PASS2)
        enccfg.g_pass = VPX_RC_LAST_PASS;
    else
        enccfg.g_pass = VPX_RC_ONE_PASS;
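    /* Pick the rate control mode: CBR when min, max and target bitrate all
     * match; otherwise constrained quality (CQ) when a CRF value is set, or
     * pure constant quality (Q) for VP9 when no target bitrate is given. */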
    if (avctx->rc_min_rate == avctx->rc_max_rate &&
        avctx->rc_min_rate == avctx->bit_rate && avctx->bit_rate) {
        enccfg.rc_end_usage = VPX_CBR;
    } else if (ctx->crf >= 0) {
        enccfg.rc_end_usage = VPX_CQ;
#if CONFIG_LIBVPX_VP9_ENCODER
        if (!avctx->bit_rate && avctx->codec_id == AV_CODEC_ID_VP9)
            enccfg.rc_end_usage = VPX_Q;
#endif
    }

    if (avctx->bit_rate) {
        enccfg.rc_target_bitrate = av_rescale_rnd(avctx->bit_rate, 1, 1000,
                                                  AV_ROUND_NEAR_INF);
#if CONFIG_LIBVPX_VP9_ENCODER
    } else if (enccfg.rc_end_usage == VPX_Q) {
#endif
    } else {
        if (enccfg.rc_end_usage == VPX_CQ) {
            enccfg.rc_target_bitrate = 1000000;
        } else {
            avctx->bit_rate = enccfg.rc_target_bitrate * 1000;
            av_log(avctx, AV_LOG_WARNING,
                   "Neither bitrate nor constrained quality specified, using default bitrate of %dkbit/sec\n",
                   enccfg.rc_target_bitrate);
        }
    }

    if (avctx->codec_id == AV_CODEC_ID_VP9 && ctx->lossless == 1) {
        enccfg.rc_min_quantizer =
        enccfg.rc_max_quantizer = 0;
    } else {
        if (avctx->qmin >= 0)
            enccfg.rc_min_quantizer = avctx->qmin;
        if (avctx->qmax >= 0)
            enccfg.rc_max_quantizer = avctx->qmax;
    }

    if (enccfg.rc_end_usage == VPX_CQ
#if CONFIG_LIBVPX_VP9_ENCODER
        || enccfg.rc_end_usage == VPX_Q
#endif
       ) {
        if (ctx->crf < enccfg.rc_min_quantizer || ctx->crf > enccfg.rc_max_quantizer) {
            av_log(avctx, AV_LOG_ERROR,
                   "CQ level %d must be between minimum and maximum quantizer value (%d-%d)\n",
                   ctx->crf, enccfg.rc_min_quantizer, enccfg.rc_max_quantizer);
            return AVERROR(EINVAL);
        }
    }

#if FF_API_PRIVATE_OPT
FF_DISABLE_DEPRECATION_WARNINGS
    if (avctx->frame_skip_threshold)
        ctx->drop_threshold = avctx->frame_skip_threshold;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    enccfg.rc_dropframe_thresh = ctx->drop_threshold;

    //0-100 (0 => CBR, 100 => VBR)
    enccfg.rc_2pass_vbr_bias_pct = lrint(avctx->qcompress * 100);
    if (avctx->bit_rate)
        enccfg.rc_2pass_vbr_minsection_pct =
            avctx->rc_min_rate * 100LL / avctx->bit_rate;
    if (avctx->rc_max_rate)
        enccfg.rc_2pass_vbr_maxsection_pct =
            avctx->rc_max_rate * 100LL / avctx->bit_rate;
#if CONFIG_LIBVPX_VP9_ENCODER
    if (avctx->codec_id == AV_CODEC_ID_VP9) {
#if VPX_ENCODER_ABI_VERSION >= 14
        if (ctx->corpus_complexity >= 0)
            enccfg.rc_2pass_vbr_corpus_complexity = ctx->corpus_complexity;
#endif
    }
#endif

    if (avctx->rc_buffer_size)
        enccfg.rc_buf_sz =
            avctx->rc_buffer_size * 1000LL / avctx->bit_rate;
    if (avctx->rc_initial_buffer_occupancy)
        enccfg.rc_buf_initial_sz =
            avctx->rc_initial_buffer_occupancy * 1000LL / avctx->bit_rate;
    enccfg.rc_buf_optimal_sz = enccfg.rc_buf_sz * 5 / 6;
    if (ctx->rc_undershoot_pct >= 0)
        enccfg.rc_undershoot_pct = ctx->rc_undershoot_pct;
    if (ctx->rc_overshoot_pct >= 0)
        enccfg.rc_overshoot_pct = ctx->rc_overshoot_pct;

    //_enc_init() will balk if kf_min_dist differs from max w/VPX_KF_AUTO
    if (avctx->keyint_min >= 0 && avctx->keyint_min == avctx->gop_size)
        enccfg.kf_min_dist = avctx->keyint_min;
    if (avctx->gop_size >= 0)
        enccfg.kf_max_dist = avctx->gop_size;

    if (enccfg.g_pass == VPX_RC_FIRST_PASS)
        enccfg.g_lag_in_frames = 0;
    else if (enccfg.g_pass == VPX_RC_LAST_PASS) {
        int decode_size, ret;

        if (!avctx->stats_in) {
            av_log(avctx, AV_LOG_ERROR, "No stats file for second pass\n");
            return AVERROR_INVALIDDATA;
        }
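        /* First-pass stats are stored base64-encoded in avctx->stats_in (see
         * vpx_encode()); decode them back into the binary block libvpx expects. */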
        ctx->twopass_stats.sz = strlen(avctx->stats_in) * 3 / 4;
        ret = av_reallocp(&ctx->twopass_stats.buf, ctx->twopass_stats.sz);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Stat buffer alloc (%"SIZE_SPECIFIER" bytes) failed\n",
                   ctx->twopass_stats.sz);
            ctx->twopass_stats.sz = 0;
            return ret;
        }
        decode_size = av_base64_decode(ctx->twopass_stats.buf, avctx->stats_in,
                                       ctx->twopass_stats.sz);
        if (decode_size < 0) {
            av_log(avctx, AV_LOG_ERROR, "Stat buffer decode failed\n");
            return AVERROR_INVALIDDATA;
        }

        ctx->twopass_stats.sz = decode_size;
        enccfg.rc_twopass_stats_in = ctx->twopass_stats;
    }

    /* 0-3: For non-zero values the encoder increasingly optimizes for reduced
       complexity playback on low powered devices at the expense of encode
       quality. */
    if (avctx->profile != FF_PROFILE_UNKNOWN)
        enccfg.g_profile = avctx->profile;

    enccfg.g_error_resilient = ctx->error_resilient || ctx->flags & VP8F_ERROR_RESILIENT;

    if (CONFIG_LIBVPX_VP8_ENCODER && avctx->codec_id == AV_CODEC_ID_VP8 && ctx->vp8_ts_parameters) {
        AVDictionary *dict = NULL;
        AVDictionaryEntry* en = NULL;

        if (!av_dict_parse_string(&dict, ctx->vp8_ts_parameters, "=", ":", 0)) {
            while ((en = av_dict_get(dict, "", en, AV_DICT_IGNORE_SUFFIX))) {
                if (vp8_ts_param_parse(&enccfg, en->key, en->value) < 0)
                    av_log(avctx, AV_LOG_WARNING,
                           "Error parsing option '%s = %s'.\n",
                           en->key, en->value);
            }
            av_dict_free(&dict);
        }
    }

    dump_enc_cfg(avctx, &enccfg);
    /* Construct Encoder Context */
    res = vpx_codec_enc_init(&ctx->encoder, iface, &enccfg, flags);
    if (res != VPX_CODEC_OK) {
        log_encoder_error(avctx, "Failed to initialize encoder");
        return AVERROR(EINVAL);
    }
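    /* The alpha plane is encoded as a second, independent stream with the same
     * settings; its packets are attached as side data in storeframe(). */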
    if (ctx->is_alpha) {
        enccfg_alpha = enccfg;
        res = vpx_codec_enc_init(&ctx->encoder_alpha, iface, &enccfg_alpha, flags);
        if (res != VPX_CODEC_OK) {
            log_encoder_error(avctx, "Failed to initialize alpha encoder");
            return AVERROR(EINVAL);
        }
    }

    //codec control failures are currently treated only as warnings
    av_log(avctx, AV_LOG_DEBUG, "vpx_codec_control\n");
    codecctl_int(avctx, VP8E_SET_CPUUSED, ctx->cpu_used);
    if (ctx->flags & VP8F_AUTO_ALT_REF)
        ctx->auto_alt_ref = 1;
    if (ctx->auto_alt_ref >= 0)
        codecctl_int(avctx, VP8E_SET_ENABLEAUTOALTREF,
                     avctx->codec_id == AV_CODEC_ID_VP8 ? !!ctx->auto_alt_ref : ctx->auto_alt_ref);
    if (ctx->arnr_max_frames >= 0)
        codecctl_int(avctx, VP8E_SET_ARNR_MAXFRAMES, ctx->arnr_max_frames);
    if (ctx->arnr_strength >= 0)
        codecctl_int(avctx, VP8E_SET_ARNR_STRENGTH, ctx->arnr_strength);
    if (ctx->arnr_type >= 0)
        codecctl_int(avctx, VP8E_SET_ARNR_TYPE, ctx->arnr_type);
    if (ctx->tune >= 0)
        codecctl_int(avctx, VP8E_SET_TUNING, ctx->tune);

    if (ctx->auto_alt_ref && ctx->is_alpha && avctx->codec_id == AV_CODEC_ID_VP8) {
        av_log(avctx, AV_LOG_ERROR, "Transparency encoding with auto_alt_ref does not work\n");
        return AVERROR(EINVAL);
    }

    if (ctx->sharpness >= 0)
        codecctl_int(avctx, VP8E_SET_SHARPNESS, ctx->sharpness);

    if (CONFIG_LIBVPX_VP8_ENCODER && avctx->codec_id == AV_CODEC_ID_VP8) {
#if FF_API_PRIVATE_OPT
FF_DISABLE_DEPRECATION_WARNINGS
        if (avctx->noise_reduction)
            ctx->noise_sensitivity = avctx->noise_reduction;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        codecctl_int(avctx, VP8E_SET_NOISE_SENSITIVITY, ctx->noise_sensitivity);
        codecctl_int(avctx, VP8E_SET_TOKEN_PARTITIONS, av_log2(avctx->slices));
    }
    codecctl_int(avctx, VP8E_SET_STATIC_THRESHOLD, ctx->static_thresh);
    if (ctx->crf >= 0)
        codecctl_int(avctx, VP8E_SET_CQ_LEVEL, ctx->crf);
    if (ctx->max_intra_rate >= 0)
        codecctl_int(avctx, VP8E_SET_MAX_INTRA_BITRATE_PCT, ctx->max_intra_rate);

#if CONFIG_LIBVPX_VP9_ENCODER
    if (avctx->codec_id == AV_CODEC_ID_VP9) {
        if (ctx->lossless >= 0)
            codecctl_int(avctx, VP9E_SET_LOSSLESS, ctx->lossless);
        if (ctx->tile_columns >= 0)
            codecctl_int(avctx, VP9E_SET_TILE_COLUMNS, ctx->tile_columns);
        if (ctx->tile_rows >= 0)
            codecctl_int(avctx, VP9E_SET_TILE_ROWS, ctx->tile_rows);
        if (ctx->frame_parallel >= 0)
            codecctl_int(avctx, VP9E_SET_FRAME_PARALLEL_DECODING, ctx->frame_parallel);
        if (ctx->aq_mode >= 0)
            codecctl_int(avctx, VP9E_SET_AQ_MODE, ctx->aq_mode);
        set_colorspace(avctx);
#if VPX_ENCODER_ABI_VERSION >= 11
        set_color_range(avctx);
#endif
#if VPX_ENCODER_ABI_VERSION >= 12
        codecctl_int(avctx, VP9E_SET_TARGET_LEVEL, ctx->level < 0 ? 255 : lrint(ctx->level * 10));
#endif
#ifdef VPX_CTRL_VP9E_SET_ROW_MT
        if (ctx->row_mt >= 0)
            codecctl_int(avctx, VP9E_SET_ROW_MT, ctx->row_mt);
#endif
#ifdef VPX_CTRL_VP9E_SET_TUNE_CONTENT
        if (ctx->tune_content >= 0)
            codecctl_int(avctx, VP9E_SET_TUNE_CONTENT, ctx->tune_content);
#endif
#ifdef VPX_CTRL_VP9E_SET_TPL
        if (ctx->tpl_model >= 0)
            codecctl_int(avctx, VP9E_SET_TPL, ctx->tpl_model);
#endif
    }
#endif

    av_log(avctx, AV_LOG_DEBUG, "Using deadline: %d\n", ctx->deadline);

    //provide dummy value to initialize wrapper, values will be updated each _encode()
    vpx_img_wrap(&ctx->rawimg, img_fmt, avctx->width, avctx->height, 1,
                 (unsigned char*)1);
#if CONFIG_LIBVPX_VP9_ENCODER
    if (avctx->codec_id == AV_CODEC_ID_VP9 && (codec_caps & VPX_CODEC_CAP_HIGHBITDEPTH))
        ctx->rawimg.bit_depth = enccfg.g_bit_depth;
#endif

    if (ctx->is_alpha)
        vpx_img_wrap(&ctx->rawimg_alpha, VPX_IMG_FMT_I420, avctx->width, avctx->height, 1,
                     (unsigned char*)1);

    cpb_props = ff_add_cpb_side_data(avctx);
    if (!cpb_props)
        return AVERROR(ENOMEM);

    if (enccfg.rc_end_usage == VPX_CBR ||
        enccfg.g_pass != VPX_RC_ONE_PASS) {
        cpb_props->max_bitrate = avctx->rc_max_rate;
        cpb_props->min_bitrate = avctx->rc_min_rate;
        cpb_props->avg_bitrate = avctx->bit_rate;
    }
    cpb_props->buffer_size = avctx->rc_buffer_size;

    return 0;
}
static inline void cx_pktcpy(struct FrameListData *dst,
                             const struct vpx_codec_cx_pkt *src,
                             const struct vpx_codec_cx_pkt *src_alpha,
                             VPxContext *ctx)
{
    dst->pts      = src->data.frame.pts;
    dst->duration = src->data.frame.duration;
    dst->flags    = src->data.frame.flags;
    dst->sz       = src->data.frame.sz;
    dst->buf      = src->data.frame.buf;
    dst->have_sse = 0;
    /* For alt-ref frame, don't store PSNR or increment frame_number */
    if (!(dst->flags & VPX_FRAME_IS_INVISIBLE)) {
        dst->frame_number = ++ctx->frame_number;
        dst->have_sse = ctx->have_sse;
        if (ctx->have_sse) {
            /* associate last-seen SSE to the frame. */
            /* Transfers ownership from ctx to dst. */
            /* WARNING! This makes the assumption that PSNR_PKT comes
               just before the frame it refers to! */
            memcpy(dst->sse, ctx->sse, sizeof(dst->sse));
            ctx->have_sse = 0;
        }
    } else {
        dst->frame_number = -1; /* sanity marker */
    }
    if (src_alpha) {
        dst->buf_alpha = src_alpha->data.frame.buf;
        dst->sz_alpha  = src_alpha->data.frame.sz;
    } else {
        dst->buf_alpha = NULL;
        dst->sz_alpha  = 0;
    }
}
/**
 * Store coded frame information in format suitable for return from encode2().
 *
 * Write information from @a cx_frame to @a pkt
 * @return packet data size on success
 * @return a negative AVERROR on error
 */
static int storeframe(AVCodecContext *avctx, struct FrameListData *cx_frame,
                      AVPacket *pkt)
{
    int ret = ff_alloc_packet2(avctx, pkt, cx_frame->sz, 0);
    uint8_t *side_data;
    if (ret >= 0) {
        int pict_type;
        memcpy(pkt->data, cx_frame->buf, pkt->size);
        pkt->pts = pkt->dts = cx_frame->pts;
#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
        avctx->coded_frame->pts       = cx_frame->pts;
        avctx->coded_frame->key_frame = !!(cx_frame->flags & VPX_FRAME_IS_KEY);
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        if (!!(cx_frame->flags & VPX_FRAME_IS_KEY)) {
            pict_type = AV_PICTURE_TYPE_I;
#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
            avctx->coded_frame->pict_type = pict_type;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            pkt->flags |= AV_PKT_FLAG_KEY;
        } else {
            pict_type = AV_PICTURE_TYPE_P;
#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
            avctx->coded_frame->pict_type = pict_type;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
        }

        ff_side_data_set_encoder_stats(pkt, 0, cx_frame->sse + 1,
                                       cx_frame->have_sse ? 3 : 0, pict_type);

        if (cx_frame->have_sse) {
            int i;
            /* Beware of the Y/U/V/all order! */
#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
            avctx->coded_frame->error[0] = cx_frame->sse[1];
            avctx->coded_frame->error[1] = cx_frame->sse[2];
            avctx->coded_frame->error[2] = cx_frame->sse[3];
            avctx->coded_frame->error[3] = 0; // alpha
FF_ENABLE_DEPRECATION_WARNINGS
#endif
            for (i = 0; i < 3; ++i) {
                avctx->error[i] += cx_frame->sse[i + 1];
            }
            cx_frame->have_sse = 0;
        }
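        /* Attach the coded alpha data as Matroska BlockAdditional side data;
         * the first 8 bytes carry the BlockAddID (1). */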
        if (cx_frame->sz_alpha > 0) {
            side_data = av_packet_new_side_data(pkt,
                                                AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL,
                                                cx_frame->sz_alpha + 8);
            if (!side_data) {
                av_packet_unref(pkt);
                av_free(pkt);
                return AVERROR(ENOMEM);
            }
            AV_WB64(side_data, 1);
            memcpy(side_data + 8, cx_frame->buf_alpha, cx_frame->sz_alpha);
        }
    } else {
        return ret;
    }
    return pkt->size;
}
/**
 * Queue multiple output frames from the encoder, returning the front-most.
 * In cases where vpx_codec_get_cx_data() returns more than 1 frame append
 * the frame queue. Return the head frame if available.
 * @return Stored frame size
 * @return AVERROR(EINVAL) on output size error
 * @return AVERROR(ENOMEM) on coded frame queue data allocation error
 */
static int queue_frames(AVCodecContext *avctx, AVPacket *pkt_out)
{
    VPxContext *ctx = avctx->priv_data;
    const struct vpx_codec_cx_pkt *pkt;
    const struct vpx_codec_cx_pkt *pkt_alpha = NULL;
    const void *iter = NULL;
    const void *iter_alpha = NULL;
    int size = 0;

    if (ctx->coded_frame_list) {
        struct FrameListData *cx_frame = ctx->coded_frame_list;
        /* return the leading frame if we've already begun queueing */
        size = storeframe(avctx, cx_frame, pkt_out);
        if (size < 0)
            return size;
        ctx->coded_frame_list = cx_frame->next;
        free_coded_frame(cx_frame);
    }

    /* consume all available output from the encoder before returning. buffers
       are only good through the next vpx_codec call */
    while ((pkt = vpx_codec_get_cx_data(&ctx->encoder, &iter)) &&
           (!ctx->is_alpha ||
            (pkt_alpha = vpx_codec_get_cx_data(&ctx->encoder_alpha, &iter_alpha)))) {
        switch (pkt->kind) {
        case VPX_CODEC_CX_FRAME_PKT:
            if (!size) {
                struct FrameListData cx_frame;

                /* avoid storing the frame when the list is empty and we haven't yet
                   provided a frame for output */
                av_assert0(!ctx->coded_frame_list);
                cx_pktcpy(&cx_frame, pkt, pkt_alpha, ctx);
                size = storeframe(avctx, &cx_frame, pkt_out);
                if (size < 0)
                    return size;
            } else {
                struct FrameListData *cx_frame =
                    av_malloc(sizeof(struct FrameListData));

                if (!cx_frame) {
                    av_log(avctx, AV_LOG_ERROR,
                           "Frame queue element alloc failed\n");
                    return AVERROR(ENOMEM);
                }
                cx_pktcpy(cx_frame, pkt, pkt_alpha, ctx);
                cx_frame->buf = av_malloc(cx_frame->sz);

                if (!cx_frame->buf) {
                    av_log(avctx, AV_LOG_ERROR,
                           "Data buffer alloc (%"SIZE_SPECIFIER" bytes) failed\n",
                           cx_frame->sz);
                    av_freep(&cx_frame);
                    return AVERROR(ENOMEM);
                }
                memcpy(cx_frame->buf, pkt->data.frame.buf, pkt->data.frame.sz);
                if (ctx->is_alpha) {
                    cx_frame->buf_alpha = av_malloc(cx_frame->sz_alpha);
                    if (!cx_frame->buf_alpha) {
                        av_log(avctx, AV_LOG_ERROR,
                               "Data buffer alloc (%"SIZE_SPECIFIER" bytes) failed\n",
                               cx_frame->sz_alpha);
                        av_free(cx_frame);
                        return AVERROR(ENOMEM);
                    }
                    memcpy(cx_frame->buf_alpha, pkt_alpha->data.frame.buf, pkt_alpha->data.frame.sz);
                }
                coded_frame_add(&ctx->coded_frame_list, cx_frame);
            }
            break;
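        /* First-pass stats packets are accumulated in twopass_stats and
         * base64-encoded into avctx->stats_out at the end of the stream. */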
        case VPX_CODEC_STATS_PKT: {
            struct vpx_fixed_buf *stats = &ctx->twopass_stats;
            int err;
            if ((err = av_reallocp(&stats->buf,
                                   stats->sz +
                                   pkt->data.twopass_stats.sz)) < 0) {
                stats->sz = 0;
                av_log(avctx, AV_LOG_ERROR, "Stat buffer realloc failed\n");
                return err;
            }
            memcpy((uint8_t*)stats->buf + stats->sz,
                   pkt->data.twopass_stats.buf, pkt->data.twopass_stats.sz);
            stats->sz += pkt->data.twopass_stats.sz;
            break;
        }
        case VPX_CODEC_PSNR_PKT:
            av_assert0(!ctx->have_sse);
            ctx->sse[0] = pkt->data.psnr.sse[0];
            ctx->sse[1] = pkt->data.psnr.sse[1];
            ctx->sse[2] = pkt->data.psnr.sse[2];
            ctx->sse[3] = pkt->data.psnr.sse[3];
            ctx->have_sse = 1;
            break;
        case VPX_CODEC_CUSTOM_PKT:
            //ignore unsupported/unrecognized packet types
            break;
        }
    }

    return size;
}
static int set_roi_map(AVCodecContext *avctx, const AVFrameSideData *sd, int frame_width, int frame_height,
                       vpx_roi_map_t *roi_map, int block_size, int segment_cnt)
{
    /**
     * range of vpx_roi_map_t.delta_q[i] is [-63, 63]
     */
#define MAX_DELTA_Q 63

    const AVRegionOfInterest *roi = NULL;
    int nb_rois;
    uint32_t self_size;
    int segment_id;

    /* record the mapping from delta_q to "segment id + 1" in segment_mapping[].
     * the range of delta_q is [-MAX_DELTA_Q, MAX_DELTA_Q],
     * and its corresponding array index is [0, 2 * MAX_DELTA_Q],
     * and so the length of the mapping array is 2 * MAX_DELTA_Q + 1.
     * "segment id + 1", so we can say there's no mapping if the value of array element is zero.
     */
    int segment_mapping[2 * MAX_DELTA_Q + 1] = { 0 };

    memset(roi_map, 0, sizeof(*roi_map));

    /* segment id 0 in roi_map is reserved for the areas not covered by AVRegionOfInterest.
     * segment id 0 in roi_map is also for the areas with AVRegionOfInterest.qoffset near 0.
     * (delta_q of segment id 0 is 0).
     */
    segment_mapping[MAX_DELTA_Q] = 1;
    segment_id = 1;

    roi = (const AVRegionOfInterest*)sd->data;
    self_size = roi->self_size;
    if (!self_size || sd->size % self_size) {
        av_log(avctx, AV_LOG_ERROR, "Invalid AVRegionOfInterest.self_size.\n");
        return AVERROR(EINVAL);
    }
    nb_rois = sd->size / self_size;

    /* This list must be iterated from zero because regions are
     * defined in order of decreasing importance. So discard less
     * important areas if they exceed the segment count.
     */
    for (int i = 0; i < nb_rois; i++) {
        int delta_q;
        int mapping_index;

        roi = (const AVRegionOfInterest*)(sd->data + self_size * i);
        if (!roi->qoffset.den) {
            av_log(avctx, AV_LOG_ERROR, "AVRegionOfInterest.qoffset.den must not be zero.\n");
            return AVERROR(EINVAL);
        }

        delta_q = (int)(roi->qoffset.num * 1.0f / roi->qoffset.den * MAX_DELTA_Q);
        delta_q = av_clip(delta_q, -MAX_DELTA_Q, MAX_DELTA_Q);

        mapping_index = delta_q + MAX_DELTA_Q;
        if (!segment_mapping[mapping_index]) {
            if (segment_id == segment_cnt) {
                av_log(avctx, AV_LOG_WARNING,
                       "ROI only supports %d segments (and segment 0 is reserved for non-ROIs), skipping the left ones.\n",
                       segment_cnt);
                break;
            }

            segment_mapping[mapping_index] = segment_id + 1;
            roi_map->delta_q[segment_id] = delta_q;
            segment_id++;
        }
    }

    roi_map->rows = (frame_height + block_size - 1) / block_size;
    roi_map->cols = (frame_width + block_size - 1) / block_size;
    roi_map->roi_map = av_mallocz_array(roi_map->rows * roi_map->cols, sizeof(*roi_map->roi_map));
    if (!roi_map->roi_map) {
        av_log(avctx, AV_LOG_ERROR, "roi_map alloc failed.\n");
        return AVERROR(ENOMEM);
    }

    /* This list must be iterated in reverse, so for the case that
     * two regions are overlapping, the more important area takes effect.
     */
    for (int i = nb_rois - 1; i >= 0; i--) {
        int delta_q;
        int mapping_value;
        int starty, endy, startx, endx;

        roi = (const AVRegionOfInterest*)(sd->data + self_size * i);

        starty = av_clip(roi->top / block_size, 0, roi_map->rows);
        endy   = av_clip((roi->bottom + block_size - 1) / block_size, 0, roi_map->rows);
        startx = av_clip(roi->left / block_size, 0, roi_map->cols);
        endx   = av_clip((roi->right + block_size - 1) / block_size, 0, roi_map->cols);

        delta_q = (int)(roi->qoffset.num * 1.0f / roi->qoffset.den * MAX_DELTA_Q);
        delta_q = av_clip(delta_q, -MAX_DELTA_Q, MAX_DELTA_Q);

        mapping_value = segment_mapping[delta_q + MAX_DELTA_Q];
        if (mapping_value) {
            for (int y = starty; y < endy; y++)
                for (int x = startx; x < endx; x++)
                    roi_map->roi_map[x + y * roi_map->cols] = mapping_value - 1;
        }
    }

    return 0;
}
static int vp9_encode_set_roi(AVCodecContext *avctx, int frame_width, int frame_height, const AVFrameSideData *sd)
{
    VPxContext *ctx = avctx->priv_data;

#ifdef VPX_CTRL_VP9E_SET_ROI_MAP
    int version = vpx_codec_version();
    int major = VPX_VERSION_MAJOR(version);
    int minor = VPX_VERSION_MINOR(version);
    int patch = VPX_VERSION_PATCH(version);

    if (major > 1 || (major == 1 && minor > 8) || (major == 1 && minor == 8 && patch >= 1)) {
        vpx_roi_map_t roi_map;
        const int segment_cnt = 8;
        const int block_size = 8;
        int ret;

        if (ctx->aq_mode > 0 || ctx->cpu_used < 5 || ctx->deadline != VPX_DL_REALTIME) {
            if (!ctx->roi_warned) {
                ctx->roi_warned = 1;
                av_log(avctx, AV_LOG_WARNING, "ROI is only enabled when aq_mode is 0, cpu_used >= 5 "
                       "and deadline is REALTIME, so skipping ROI.\n");
                return AVERROR(EINVAL);
            }
        }

        ret = set_roi_map(avctx, sd, frame_width, frame_height, &roi_map, block_size, segment_cnt);
        if (ret) {
            log_encoder_error(avctx, "Failed to set_roi_map.\n");
            return ret;
        }

        memset(roi_map.ref_frame, -1, sizeof(roi_map.ref_frame));

        if (vpx_codec_control(&ctx->encoder, VP9E_SET_ROI_MAP, &roi_map)) {
            log_encoder_error(avctx, "Failed to set VP9E_SET_ROI_MAP codec control.\n");
            ret = AVERROR_INVALIDDATA;
        }
        av_freep(&roi_map.roi_map);
        return ret;
    }
#endif

    if (!ctx->roi_warned) {
        ctx->roi_warned = 1;
        av_log(avctx, AV_LOG_WARNING, "ROI is not supported, please upgrade libvpx to version >= 1.8.1. "
               "You may need to rebuild ffmpeg.\n");
    }
    return 0;
}
static int vp8_encode_set_roi(AVCodecContext *avctx, int frame_width, int frame_height, const AVFrameSideData *sd)
{
    vpx_roi_map_t roi_map;
    const int segment_cnt = 4;
    const int block_size = 16;
    VPxContext *ctx = avctx->priv_data;

    int ret = set_roi_map(avctx, sd, frame_width, frame_height, &roi_map, block_size, segment_cnt);
    if (ret) {
        log_encoder_error(avctx, "Failed to set_roi_map.\n");
        return ret;
    }

    if (vpx_codec_control(&ctx->encoder, VP8E_SET_ROI_MAP, &roi_map)) {
        log_encoder_error(avctx, "Failed to set VP8E_SET_ROI_MAP codec control.\n");
        ret = AVERROR_INVALIDDATA;
    }

    av_freep(&roi_map.roi_map);
    return ret;
}
static int vpx_encode(AVCodecContext *avctx, AVPacket *pkt,
                      const AVFrame *frame, int *got_packet)
{
    VPxContext *ctx = avctx->priv_data;
    struct vpx_image *rawimg = NULL;
    struct vpx_image *rawimg_alpha = NULL;
    int64_t timestamp = 0;
    int res, coded_size;
    vpx_enc_frame_flags_t flags = 0;

    if (frame) {
        const AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_REGIONS_OF_INTEREST);
        rawimg = &ctx->rawimg;
        rawimg->planes[VPX_PLANE_Y] = frame->data[0];
        rawimg->planes[VPX_PLANE_U] = frame->data[1];
        rawimg->planes[VPX_PLANE_V] = frame->data[2];
        rawimg->stride[VPX_PLANE_Y] = frame->linesize[0];
        rawimg->stride[VPX_PLANE_U] = frame->linesize[1];
        rawimg->stride[VPX_PLANE_V] = frame->linesize[2];
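        /* The alpha plane is fed to the second encoder as the luma plane of a
         * synthetic frame whose chroma planes are filled with neutral gray (0x80). */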
        if (ctx->is_alpha) {
            uint8_t *u_plane, *v_plane;
            rawimg_alpha = &ctx->rawimg_alpha;
            rawimg_alpha->planes[VPX_PLANE_Y] = frame->data[3];
            u_plane = av_malloc(frame->linesize[1] * frame->height);
            v_plane = av_malloc(frame->linesize[2] * frame->height);
            if (!u_plane || !v_plane) {
                av_free(u_plane);
                av_free(v_plane);
                return AVERROR(ENOMEM);
            }
            memset(u_plane, 0x80, frame->linesize[1] * frame->height);
            rawimg_alpha->planes[VPX_PLANE_U] = u_plane;
            memset(v_plane, 0x80, frame->linesize[2] * frame->height);
            rawimg_alpha->planes[VPX_PLANE_V] = v_plane;
            rawimg_alpha->stride[VPX_PLANE_Y] = frame->linesize[0];
            rawimg_alpha->stride[VPX_PLANE_U] = frame->linesize[1];
            rawimg_alpha->stride[VPX_PLANE_V] = frame->linesize[2];
        }
        timestamp = frame->pts;
#if VPX_IMAGE_ABI_VERSION >= 4
        switch (frame->color_range) {
        case AVCOL_RANGE_MPEG:
            rawimg->range = VPX_CR_STUDIO_RANGE;
            break;
        case AVCOL_RANGE_JPEG:
            rawimg->range = VPX_CR_FULL_RANGE;
            break;
        }
#endif
        if (frame->pict_type == AV_PICTURE_TYPE_I)
            flags |= VPX_EFLAG_FORCE_KF;
        if (CONFIG_LIBVPX_VP8_ENCODER && avctx->codec_id == AV_CODEC_ID_VP8 && frame->metadata) {
            AVDictionaryEntry* en = av_dict_get(frame->metadata, "vp8-flags", NULL, 0);
            if (en) {
                flags |= strtoul(en->value, NULL, 10);
            }
        }

        if (sd) {
            if (avctx->codec_id == AV_CODEC_ID_VP8) {
                vp8_encode_set_roi(avctx, frame->width, frame->height, sd);
            } else {
                vp9_encode_set_roi(avctx, frame->width, frame->height, sd);
            }
        }
    }

    res = vpx_codec_encode(&ctx->encoder, rawimg, timestamp,
                           avctx->ticks_per_frame, flags, ctx->deadline);
    if (res != VPX_CODEC_OK) {
        log_encoder_error(avctx, "Error encoding frame");
        return AVERROR_INVALIDDATA;
    }

    if (ctx->is_alpha) {
        res = vpx_codec_encode(&ctx->encoder_alpha, rawimg_alpha, timestamp,
                               avctx->ticks_per_frame, flags, ctx->deadline);
        if (res != VPX_CODEC_OK) {
            log_encoder_error(avctx, "Error encoding alpha frame");
            return AVERROR_INVALIDDATA;
        }
    }

    coded_size = queue_frames(avctx, pkt);

    if (!frame && avctx->flags & AV_CODEC_FLAG_PASS1) {
        unsigned int b64_size = AV_BASE64_SIZE(ctx->twopass_stats.sz);

        avctx->stats_out = av_malloc(b64_size);
        if (!avctx->stats_out) {
            av_log(avctx, AV_LOG_ERROR, "Stat buffer alloc (%d bytes) failed\n",
                   b64_size);
            return AVERROR(ENOMEM);
        }
        av_base64_encode(avctx->stats_out, b64_size, ctx->twopass_stats.buf,
                         ctx->twopass_stats.sz);
    }

    if (rawimg_alpha) {
        av_freep(&rawimg_alpha->planes[VPX_PLANE_U]);
        av_freep(&rawimg_alpha->planes[VPX_PLANE_V]);
    }

    *got_packet = !!coded_size;
    return 0;
}
#define OFFSET(x) offsetof(VPxContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM

#define COMMON_OPTIONS \
    { "lag-in-frames", "Number of frames to look ahead for " \
      "alternate reference frame selection", OFFSET(lag_in_frames), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE}, \
    { "arnr-maxframes", "altref noise reduction max frame count", OFFSET(arnr_max_frames), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE}, \
    { "arnr-strength", "altref noise reduction filter strength", OFFSET(arnr_strength), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE}, \
    { "arnr-type", "altref noise reduction filter type", OFFSET(arnr_type), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE, "arnr_type"}, \
    { "backward", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1}, 0, 0, VE, "arnr_type" }, \
    { "forward", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2}, 0, 0, VE, "arnr_type" }, \
    { "centered", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 3}, 0, 0, VE, "arnr_type" }, \
    { "tune", "Tune the encoding to a specific scenario", OFFSET(tune), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE, "tune"}, \
    { "psnr", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = VP8_TUNE_PSNR}, 0, 0, VE, "tune"}, \
    { "ssim", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = VP8_TUNE_SSIM}, 0, 0, VE, "tune"}, \
    { "deadline", "Time to spend encoding, in microseconds.", OFFSET(deadline), AV_OPT_TYPE_INT, {.i64 = VPX_DL_GOOD_QUALITY}, INT_MIN, INT_MAX, VE, "quality"}, \
    { "best", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = VPX_DL_BEST_QUALITY}, 0, 0, VE, "quality"}, \
    { "good", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = VPX_DL_GOOD_QUALITY}, 0, 0, VE, "quality"}, \
    { "realtime", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = VPX_DL_REALTIME}, 0, 0, VE, "quality"}, \
    { "error-resilient", "Error resilience configuration", OFFSET(error_resilient), AV_OPT_TYPE_FLAGS, {.i64 = 0}, INT_MIN, INT_MAX, VE, "er"}, \
    { "max-intra-rate", "Maximum I-frame bitrate (pct) 0=unlimited", OFFSET(max_intra_rate), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, VE}, \
    { "default", "Improve resiliency against losses of whole frames", 0, AV_OPT_TYPE_CONST, {.i64 = VPX_ERROR_RESILIENT_DEFAULT}, 0, 0, VE, "er"}, \
    { "partitions", "The frame partitions are independently decodable " \
      "by the bool decoder, meaning that partitions can be decoded even " \
      "though earlier partitions have been lost. Note that intra prediction" \
      " is still done over the partition boundary.", 0, AV_OPT_TYPE_CONST, {.i64 = VPX_ERROR_RESILIENT_PARTITIONS}, 0, 0, VE, "er"}, \
    { "crf", "Select the quality for constant quality mode", offsetof(VPxContext, crf), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 63, VE }, \
    { "static-thresh", "A change threshold on blocks below which they will be skipped by the encoder", OFFSET(static_thresh), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, VE }, \
    { "drop-threshold", "Frame drop threshold", offsetof(VPxContext, drop_threshold), AV_OPT_TYPE_INT, {.i64 = 0 }, INT_MIN, INT_MAX, VE }, \
    { "noise-sensitivity", "Noise sensitivity", OFFSET(noise_sensitivity), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 4, VE}, \
    { "undershoot-pct", "Datarate undershoot (min) target (%)", OFFSET(rc_undershoot_pct), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 100, VE }, \
    { "overshoot-pct", "Datarate overshoot (max) target (%)", OFFSET(rc_overshoot_pct), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 1000, VE }, \

#define LEGACY_OPTIONS \
    {"speed", "", offsetof(VPxContext, cpu_used), AV_OPT_TYPE_INT, {.i64 = 1}, -16, 16, VE}, \
    {"quality", "", offsetof(VPxContext, deadline), AV_OPT_TYPE_INT, {.i64 = VPX_DL_GOOD_QUALITY}, INT_MIN, INT_MAX, VE, "quality"}, \
    {"vp8flags", "", offsetof(VPxContext, flags), AV_OPT_TYPE_FLAGS, {.i64 = 0}, 0, UINT_MAX, VE, "flags"}, \
    {"error_resilient", "enable error resilience", 0, AV_OPT_TYPE_CONST, {.i64 = VP8F_ERROR_RESILIENT}, INT_MIN, INT_MAX, VE, "flags"}, \
    {"altref", "enable use of alternate reference frames (VP8/2-pass only)", 0, AV_OPT_TYPE_CONST, {.i64 = VP8F_AUTO_ALT_REF}, INT_MIN, INT_MAX, VE, "flags"}, \
    {"arnr_max_frames", "altref noise reduction max frame count", offsetof(VPxContext, arnr_max_frames), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 15, VE}, \
    {"arnr_strength", "altref noise reduction filter strength", offsetof(VPxContext, arnr_strength), AV_OPT_TYPE_INT, {.i64 = 3}, 0, 6, VE}, \
    {"arnr_type", "altref noise reduction filter type", offsetof(VPxContext, arnr_type), AV_OPT_TYPE_INT, {.i64 = 3}, 1, 3, VE}, \
    {"rc_lookahead", "Number of frames to look ahead for alternate reference frame selection", offsetof(VPxContext, lag_in_frames), AV_OPT_TYPE_INT, {.i64 = 25}, 0, 25, VE}, \
    {"sharpness", "Increase sharpness at the expense of lower PSNR", offsetof(VPxContext, sharpness), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 7, VE},
#if CONFIG_LIBVPX_VP8_ENCODER
static const AVOption vp8_options[] = {
    COMMON_OPTIONS
    { "auto-alt-ref", "Enable use of alternate reference "
                      "frames (2-pass only)", OFFSET(auto_alt_ref), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 2, VE},
    { "cpu-used", "Quality/Speed ratio modifier", OFFSET(cpu_used), AV_OPT_TYPE_INT, {.i64 = 1}, -16, 16, VE},
    { "ts-parameters", "Temporal scaling configuration using a "
                       ":-separated list of key=value parameters", OFFSET(vp8_ts_parameters), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, VE},
    LEGACY_OPTIONS
    { NULL }
};
#endif

#if CONFIG_LIBVPX_VP9_ENCODER
static const AVOption vp9_options[] = {
    COMMON_OPTIONS
    { "auto-alt-ref", "Enable use of alternate reference "
                      "frames (2-pass only)", OFFSET(auto_alt_ref), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 6, VE},
    { "cpu-used", "Quality/Speed ratio modifier", OFFSET(cpu_used), AV_OPT_TYPE_INT, {.i64 = 1}, -8, 8, VE},
    { "lossless", "Lossless mode", OFFSET(lossless), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 1, VE},
    { "tile-columns", "Number of tile columns to use, log2", OFFSET(tile_columns), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 6, VE},
    { "tile-rows", "Number of tile rows to use, log2", OFFSET(tile_rows), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 2, VE},
    { "frame-parallel", "Enable frame parallel decodability features", OFFSET(frame_parallel), AV_OPT_TYPE_BOOL, {.i64 = -1}, -1, 1, VE},
#if VPX_ENCODER_ABI_VERSION >= 12
    { "aq-mode", "adaptive quantization mode", OFFSET(aq_mode), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 4, VE, "aq_mode"},
#else
    { "aq-mode", "adaptive quantization mode", OFFSET(aq_mode), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 3, VE, "aq_mode"},
#endif
    { "none", "Aq not used", 0, AV_OPT_TYPE_CONST, {.i64 = 0}, 0, 0, VE, "aq_mode" },
    { "variance", "Variance based Aq", 0, AV_OPT_TYPE_CONST, {.i64 = 1}, 0, 0, VE, "aq_mode" },
    { "complexity", "Complexity based Aq", 0, AV_OPT_TYPE_CONST, {.i64 = 2}, 0, 0, VE, "aq_mode" },
    { "cyclic", "Cyclic Refresh Aq", 0, AV_OPT_TYPE_CONST, {.i64 = 3}, 0, 0, VE, "aq_mode" },
#if VPX_ENCODER_ABI_VERSION >= 12
    { "equator360", "360 video Aq", 0, AV_OPT_TYPE_CONST, {.i64 = 4}, 0, 0, VE, "aq_mode" },
    { "level", "Specify level", OFFSET(level), AV_OPT_TYPE_FLOAT, {.dbl=-1}, -1, 6.2, VE},
#endif
#ifdef VPX_CTRL_VP9E_SET_ROW_MT
    { "row-mt", "Row based multi-threading", OFFSET(row_mt), AV_OPT_TYPE_BOOL, {.i64 = -1}, -1, 1, VE},
#endif
#ifdef VPX_CTRL_VP9E_SET_TUNE_CONTENT
#if VPX_ENCODER_ABI_VERSION >= 14
    { "tune-content", "Tune content type", OFFSET(tune_content), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 2, VE, "tune_content" },
#else
    { "tune-content", "Tune content type", OFFSET(tune_content), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 1, VE, "tune_content" },
#endif
    { "default", "Regular video content", 0, AV_OPT_TYPE_CONST, {.i64 = 0}, 0, 0, VE, "tune_content" },
    { "screen", "Screen capture content", 0, AV_OPT_TYPE_CONST, {.i64 = 1}, 0, 0, VE, "tune_content" },
#if VPX_ENCODER_ABI_VERSION >= 14
    { "film", "Film content; improves grain retention", 0, AV_OPT_TYPE_CONST, {.i64 = 2}, 0, 0, VE, "tune_content" },
#endif
#endif
#if VPX_ENCODER_ABI_VERSION >= 14
    { "corpus-complexity", "corpus vbr complexity midpoint", OFFSET(corpus_complexity), AV_OPT_TYPE_INT, {.i64 = -1}, -1, 10000, VE },
#endif
#ifdef VPX_CTRL_VP9E_SET_TPL
    { "enable-tpl", "Enable temporal dependency model", OFFSET(tpl_model), AV_OPT_TYPE_BOOL, {.i64 = -1}, -1, 1, VE },
#endif
    LEGACY_OPTIONS
    { NULL }
};
#endif

#undef COMMON_OPTIONS
#undef LEGACY_OPTIONS

static const AVCodecDefault defaults[] = {
    { "qmin",       "-1" },
    { "qmax",       "-1" },
    { "g",          "-1" },
    { "keyint_min", "-1" },
    { NULL },
};
#if CONFIG_LIBVPX_VP8_ENCODER
static av_cold int vp8_init(AVCodecContext *avctx)
{
    return vpx_init(avctx, vpx_codec_vp8_cx());
}

static const AVClass class_vp8 = {
    .class_name = "libvpx-vp8 encoder",
    .item_name  = av_default_item_name,
    .option     = vp8_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_libvpx_vp8_encoder = {
    .name           = "libvpx",
    .long_name      = NULL_IF_CONFIG_SMALL("libvpx VP8"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VP8,
    .priv_data_size = sizeof(VPxContext),
    .init           = vp8_init,
    .encode2        = vpx_encode,
    .close          = vpx_free,
    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS,
    .pix_fmts       = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVA420P, AV_PIX_FMT_NONE },
    .priv_class     = &class_vp8,
    .defaults       = defaults,
    .wrapper_name   = "libvpx",
};
#endif /* CONFIG_LIBVPX_VP8_ENCODER */

#if CONFIG_LIBVPX_VP9_ENCODER
static av_cold int vp9_init(AVCodecContext *avctx)
{
    return vpx_init(avctx, vpx_codec_vp9_cx());
}

static const AVClass class_vp9 = {
    .class_name = "libvpx-vp9 encoder",
    .item_name  = av_default_item_name,
    .option     = vp9_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_libvpx_vp9_encoder = {
    .name             = "libvpx-vp9",
    .long_name        = NULL_IF_CONFIG_SMALL("libvpx VP9"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_VP9,
    .priv_data_size   = sizeof(VPxContext),
    .init             = vp9_init,
    .encode2          = vpx_encode,
    .close            = vpx_free,
    .capabilities     = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS,
    .profiles         = NULL_IF_CONFIG_SMALL(ff_vp9_profiles),
    .priv_class       = &class_vp9,
    .defaults         = defaults,
    .init_static_data = ff_vp9_init_static,
    .wrapper_name     = "libvpx",
};
#endif /* CONFIG_LIBVPX_VP9_ENCODER */