/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include <va/va.h>
#include <va/va_enc_h264.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "cbs.h"
#include "cbs_h264.h"
#include "h264.h"
#include "h264_sei.h"
#include "internal.h"
#include "vaapi_encode.h"

enum {
    SEI_TIMING         = 0x01,
    SEI_IDENTIFIER     = 0x02,
    SEI_RECOVERY_POINT = 0x04,
};

// Random (version 4) ISO 11578 UUID.
static const uint8_t vaapi_encode_h264_sei_identifier_uuid[16] = {
    0x59, 0x94, 0x8b, 0x28, 0x11, 0xec, 0x45, 0xaf,
    0x96, 0x75, 0x19, 0xd4, 0x1f, 0xea, 0xa9, 0x4d,
};

typedef struct VAAPIEncodeH264Context {
    int mb_width;
    int mb_height;

    int fixed_qp_idr;
    int fixed_qp_p;
    int fixed_qp_b;

    H264RawAUD   aud;
    H264RawSPS   sps;
    H264RawPPS   pps;
    H264RawSEI   sei;
    H264RawSlice slice;

    H264RawSEIBufferingPeriod      buffering_period;
    H264RawSEIPicTiming            pic_timing;
    H264RawSEIRecoveryPoint        recovery_point;
    H264RawSEIUserDataUnregistered identifier;
    char                          *identifier_string;

    int frame_num;
    int pic_order_cnt;
    int next_frame_num;
    int64_t last_idr_frame;
    int64_t idr_pic_count;

    int primary_pic_type;
    int slice_type;

    int cpb_delay;
    int dpb_delay;

    CodedBitstreamContext  cbc;
    CodedBitstreamFragment current_access_unit;
    int aud_needed;
    int sei_needed;
    int sei_cbr_workaround_needed;
} VAAPIEncodeH264Context;

typedef struct VAAPIEncodeH264Options {
    int qp;
    int quality;
    int low_power;
    int aud;
    int sei;
} VAAPIEncodeH264Options;

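// Serialise the access unit accumulated in the CBS fragment and copy the
// packed bitstream into the caller's buffer, returning its length in bits.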
static int vaapi_encode_h264_write_access_unit(AVCodecContext *avctx,
                                               char *data, size_t *data_len,
                                               CodedBitstreamFragment *au)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int err;

    err = ff_cbs_write_fragment_data(&priv->cbc, au);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to write packed header.\n");
        return err;
    }

    if (*data_len < 8 * au->data_size - au->data_bit_padding) {
        av_log(avctx, AV_LOG_ERROR, "Access unit too large: "
               "%zu < %zu.\n", *data_len,
               8 * au->data_size - au->data_bit_padding);
        return AVERROR(ENOSPC);
    }

    memcpy(data, au->data, au->data_size);
    *data_len = 8 * au->data_size - au->data_bit_padding;

    return 0;
}

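// Append a single raw NAL unit (SPS, PPS, SEI, AUD or slice header) to the
// access-unit fragment currently being assembled.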
static int vaapi_encode_h264_add_nal(AVCodecContext *avctx,
                                     CodedBitstreamFragment *au,
                                     void *nal_unit)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    H264RawNALUnitHeader *header = nal_unit;
    int err;

    err = ff_cbs_insert_unit_content(&priv->cbc, au, -1,
                                     header->nal_unit_type, nal_unit);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to add NAL unit: "
               "type = %d.\n", header->nal_unit_type);
        return err;
    }

    return 0;
}

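// Build the packed sequence header: an optional AUD followed by the SPS and
// PPS, written out as one access unit.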
static int vaapi_encode_h264_write_sequence_header(AVCodecContext *avctx,
                                                   char *data, size_t *data_len)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    CodedBitstreamFragment   *au = &priv->current_access_unit;
    int err;

    if (priv->aud_needed) {
        err = vaapi_encode_h264_add_nal(avctx, au, &priv->aud);
        if (err < 0)
            goto fail;
        priv->aud_needed = 0;
    }

    err = vaapi_encode_h264_add_nal(avctx, au, &priv->sps);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h264_add_nal(avctx, au, &priv->pps);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h264_write_access_unit(avctx, data, data_len, au);
fail:
    ff_cbs_fragment_uninit(&priv->cbc, au);
    return err;
}

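// Build the packed slice header for the current slice, preceded by an AUD if
// one is still pending for this picture.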
static int vaapi_encode_h264_write_slice_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                VAAPIEncodeSlice *slice,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    CodedBitstreamFragment   *au = &priv->current_access_unit;
    int err;

    if (priv->aud_needed) {
        err = vaapi_encode_h264_add_nal(avctx, au, &priv->aud);
        if (err < 0)
            goto fail;
        priv->aud_needed = 0;
    }

    err = vaapi_encode_h264_add_nal(avctx, au, &priv->slice);
    if (err < 0)
        goto fail;

    err = vaapi_encode_h264_write_access_unit(avctx, data, data_len, au);
fail:
    ff_cbs_fragment_uninit(&priv->cbc, au);
    return err;
}

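// Emit any extra packed headers for this picture: currently only the SEI
// message carrying the encoder identifier, timing and/or recovery point
// payloads.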
static int vaapi_encode_h264_write_extra_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                int index, int *type,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options  *opt = ctx->codec_options;
    CodedBitstreamFragment   *au = &priv->current_access_unit;
    int err, i;

    if (priv->sei_needed) {
        if (priv->aud_needed) {
            vaapi_encode_h264_add_nal(avctx, au, &priv->aud);
            priv->aud_needed = 0;
        }

        memset(&priv->sei, 0, sizeof(priv->sei));
        priv->sei.nal_unit_header.nal_unit_type = H264_NAL_SEI;

        i = 0;
        if (pic->encode_order == 0 && opt->sei & SEI_IDENTIFIER) {
            priv->sei.payload[i].payload_type = H264_SEI_TYPE_USER_DATA_UNREGISTERED;
            priv->sei.payload[i].payload.user_data_unregistered = priv->identifier;
            ++i;
        }
        if (opt->sei & SEI_TIMING) {
            if (pic->type == PICTURE_TYPE_IDR) {
                priv->sei.payload[i].payload_type = H264_SEI_TYPE_BUFFERING_PERIOD;
                priv->sei.payload[i].payload.buffering_period = priv->buffering_period;
                ++i;
            }
            priv->sei.payload[i].payload_type = H264_SEI_TYPE_PIC_TIMING;
            priv->sei.payload[i].payload.pic_timing = priv->pic_timing;
            ++i;
        }
        if (opt->sei & SEI_RECOVERY_POINT && pic->type == PICTURE_TYPE_I) {
            priv->sei.payload[i].payload_type = H264_SEI_TYPE_RECOVERY_POINT;
            priv->sei.payload[i].payload.recovery_point = priv->recovery_point;
            ++i;
        }

        priv->sei.payload_count = i;
        av_assert0(priv->sei.payload_count > 0);

        err = vaapi_encode_h264_add_nal(avctx, au, &priv->sei);
        if (err < 0)
            goto fail;
        priv->sei_needed = 0;

        err = vaapi_encode_h264_write_access_unit(avctx, data, data_len, au);
        if (err < 0)
            goto fail;

        ff_cbs_fragment_uninit(&priv->cbc, au);

        *type = VAEncPackedHeaderH264_SEI;
        return 0;

#if !HAVE_VAAPI_1
    } else if (priv->sei_cbr_workaround_needed) {
        // Insert a zero-length header using the old SEI type.  This is
        // required to avoid triggering broken behaviour on Intel platforms
        // in CBR mode where an invalid SEI message is generated by the
        // driver and inserted into the stream.
        *data_len = 0;
        *type = VAEncPackedHeaderH264_SEI;
        priv->sei_cbr_workaround_needed = 0;
        return 0;
#endif

    } else {
        return AVERROR_EOF;
    }

fail:
    ff_cbs_fragment_uninit(&priv->cbc, au);
    return err;
}

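// Fill the raw SPS/PPS structures and the corresponding VAAPI sequence and
// picture parameter buffers from the codec context settings.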
static int vaapi_encode_h264_init_sequence_params(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options  *opt = ctx->codec_options;
    H264RawSPS              *sps = &priv->sps;
    H264RawPPS              *pps = &priv->pps;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264  *vpic = ctx->codec_picture_params;

    memset(&priv->current_access_unit, 0,
           sizeof(priv->current_access_unit));

    memset(sps, 0, sizeof(*sps));
    memset(pps, 0, sizeof(*pps));

    sps->nal_unit_header.nal_ref_idc   = 3;
    sps->nal_unit_header.nal_unit_type = H264_NAL_SPS;

    sps->profile_idc = avctx->profile & 0xff;
    sps->constraint_set1_flag =
        !!(avctx->profile & FF_PROFILE_H264_CONSTRAINED);
    sps->constraint_set3_flag =
        !!(avctx->profile & FF_PROFILE_H264_INTRA);

    sps->level_idc = avctx->level;

    sps->seq_parameter_set_id = 0;
    sps->chroma_format_idc    = 1;

    sps->log2_max_frame_num_minus4 = 4;
    sps->pic_order_cnt_type        = 0;
    sps->log2_max_pic_order_cnt_lsb_minus4 =
        av_clip(av_log2(ctx->b_per_p + 1) - 2, 0, 12);

    sps->max_num_ref_frames =
        (avctx->profile & FF_PROFILE_H264_INTRA) ? 0 :
        1 + (ctx->b_per_p > 0);

    sps->pic_width_in_mbs_minus1        = priv->mb_width  - 1;
    sps->pic_height_in_map_units_minus1 = priv->mb_height - 1;

    sps->frame_mbs_only_flag       = 1;
    sps->direct_8x8_inference_flag = 1;

    if (avctx->width  != 16 * priv->mb_width ||
        avctx->height != 16 * priv->mb_height) {
        sps->frame_cropping_flag = 1;

        sps->frame_crop_left_offset   = 0;
        sps->frame_crop_right_offset  =
            (16 * priv->mb_width - avctx->width) / 2;
        sps->frame_crop_top_offset    = 0;
        sps->frame_crop_bottom_offset =
            (16 * priv->mb_height - avctx->height) / 2;
    } else {
        sps->frame_cropping_flag = 0;
    }

    sps->vui_parameters_present_flag = 1;

    if (avctx->sample_aspect_ratio.num != 0 &&
        avctx->sample_aspect_ratio.den != 0) {
        static const AVRational sar_idc[] = {
            {   0,  0 },
            {   1,  1 }, {  12, 11 }, {  10, 11 }, {  16, 11 },
            {  40, 33 }, {  24, 11 }, {  20, 11 }, {  32, 11 },
            {  80, 33 }, {  18, 11 }, {  15, 11 }, {  64, 33 },
            { 160, 99 }, {   4,  3 }, {   3,  2 }, {   2,  1 },
        };
        int i;
        for (i = 0; i < FF_ARRAY_ELEMS(sar_idc); i++) {
            if (avctx->sample_aspect_ratio.num == sar_idc[i].num &&
                avctx->sample_aspect_ratio.den == sar_idc[i].den) {
                sps->vui.aspect_ratio_idc = i;
                break;
            }
        }
        if (i >= FF_ARRAY_ELEMS(sar_idc)) {
            sps->vui.aspect_ratio_idc = 255;
            sps->vui.sar_width  = avctx->sample_aspect_ratio.num;
            sps->vui.sar_height = avctx->sample_aspect_ratio.den;
        }
        sps->vui.aspect_ratio_info_present_flag = 1;
    }

    if (avctx->color_range     != AVCOL_RANGE_UNSPECIFIED ||
        avctx->color_primaries != AVCOL_PRI_UNSPECIFIED   ||
        avctx->color_trc       != AVCOL_TRC_UNSPECIFIED   ||
        avctx->colorspace      != AVCOL_SPC_UNSPECIFIED) {
        sps->vui.video_signal_type_present_flag = 1;
        sps->vui.video_format = 5; // Unspecified.
        sps->vui.video_full_range_flag =
            avctx->color_range == AVCOL_RANGE_JPEG;

        if (avctx->color_primaries != AVCOL_PRI_UNSPECIFIED ||
            avctx->color_trc       != AVCOL_TRC_UNSPECIFIED ||
            avctx->colorspace      != AVCOL_SPC_UNSPECIFIED) {
            sps->vui.colour_description_present_flag = 1;
            sps->vui.colour_primaries         = avctx->color_primaries;
            sps->vui.transfer_characteristics = avctx->color_trc;
            sps->vui.matrix_coefficients      = avctx->colorspace;
        }
    } else {
        sps->vui.video_format             = 5;
        sps->vui.video_full_range_flag    = 0;
        sps->vui.colour_primaries         = avctx->color_primaries;
        sps->vui.transfer_characteristics = avctx->color_trc;
        sps->vui.matrix_coefficients      = avctx->colorspace;
    }

    if (avctx->chroma_sample_location != AVCHROMA_LOC_UNSPECIFIED) {
        sps->vui.chroma_loc_info_present_flag = 1;
        sps->vui.chroma_sample_loc_type_top_field    =
        sps->vui.chroma_sample_loc_type_bottom_field =
            avctx->chroma_sample_location - 1;
    }

    sps->vui.timing_info_present_flag = 1;
    if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
        sps->vui.num_units_in_tick     = avctx->framerate.den;
        sps->vui.time_scale            = 2 * avctx->framerate.num;
        sps->vui.fixed_frame_rate_flag = 1;
    } else {
        sps->vui.num_units_in_tick     = avctx->time_base.num;
        sps->vui.time_scale            = 2 * avctx->time_base.den;
        sps->vui.fixed_frame_rate_flag = 0;
    }

    if (opt->sei & SEI_TIMING) {
        H264RawHRD *hrd = &sps->vui.nal_hrd_parameters;

        sps->vui.nal_hrd_parameters_present_flag = 1;

        hrd->cpb_cnt_minus1 = 0;

        // Try to scale these to a sensible range so that the
        // golomb encode of the value is not overlong.
        hrd->bit_rate_scale =
            av_clip_uintp2(av_log2(avctx->bit_rate) - 15 - 6, 4);
        hrd->bit_rate_value_minus1[0] =
            (avctx->bit_rate >> hrd->bit_rate_scale + 6) - 1;

        hrd->cpb_size_scale =
            av_clip_uintp2(av_log2(ctx->hrd_params.hrd.buffer_size) - 15 - 4, 4);
        hrd->cpb_size_value_minus1[0] =
            (ctx->hrd_params.hrd.buffer_size >> hrd->cpb_size_scale + 4) - 1;

        // CBR mode as defined for the HRD cannot be achieved without filler
        // data, so this flag cannot be set even with VAAPI CBR modes.
        hrd->cbr_flag[0] = 0;

        hrd->initial_cpb_removal_delay_length_minus1 = 23;
        hrd->cpb_removal_delay_length_minus1         = 23;
        hrd->dpb_output_delay_length_minus1          = 7;
        hrd->time_offset_length                      = 0;

        priv->buffering_period.seq_parameter_set_id = sps->seq_parameter_set_id;

        // This calculation can easily overflow 32 bits.
        priv->buffering_period.nal.initial_cpb_removal_delay[0] = 90000 *
            (uint64_t)ctx->hrd_params.hrd.initial_buffer_fullness /
            ctx->hrd_params.hrd.buffer_size;
        priv->buffering_period.nal.initial_cpb_removal_delay_offset[0] = 0;
    } else {
        sps->vui.nal_hrd_parameters_present_flag = 0;
        sps->vui.low_delay_hrd_flag = 1 - sps->vui.fixed_frame_rate_flag;
    }

    sps->vui.bitstream_restriction_flag    = 1;
    sps->vui.motion_vectors_over_pic_boundaries_flag = 1;
    sps->vui.log2_max_mv_length_horizontal = 16;
    sps->vui.log2_max_mv_length_vertical   = 16;
    sps->vui.max_num_reorder_frames        = (ctx->b_per_p > 0);
    // Use the SPS value here: vseq has not been filled in yet at this point,
    // so reading vseq->max_num_ref_frames would pick up a stale/zero value.
    sps->vui.max_dec_frame_buffering       = sps->max_num_ref_frames;

    pps->nal_unit_header.nal_ref_idc   = 3;
    pps->nal_unit_header.nal_unit_type = H264_NAL_PPS;

    pps->pic_parameter_set_id = 0;
    pps->seq_parameter_set_id = 0;

    pps->entropy_coding_mode_flag =
        !(sps->profile_idc == FF_PROFILE_H264_BASELINE ||
          sps->profile_idc == FF_PROFILE_H264_EXTENDED ||
          sps->profile_idc == FF_PROFILE_H264_CAVLC_444);

    pps->num_ref_idx_l0_default_active_minus1 = 0;
    pps->num_ref_idx_l1_default_active_minus1 = 0;

    pps->pic_init_qp_minus26 = priv->fixed_qp_idr - 26;

    if (sps->profile_idc == FF_PROFILE_H264_BASELINE ||
        sps->profile_idc == FF_PROFILE_H264_EXTENDED ||
        sps->profile_idc == FF_PROFILE_H264_MAIN) {
        pps->more_rbsp_data = 0;
    } else {
        pps->more_rbsp_data = 1;

        pps->transform_8x8_mode_flag = 1;
    }

    *vseq = (VAEncSequenceParameterBufferH264) {
        .seq_parameter_set_id = sps->seq_parameter_set_id,
        .level_idc            = sps->level_idc,
        .intra_period         = avctx->gop_size,
        .intra_idr_period     = avctx->gop_size,
        .ip_period            = ctx->b_per_p + 1,

        .bits_per_second       = avctx->bit_rate,
        .max_num_ref_frames    = sps->max_num_ref_frames,
        .picture_width_in_mbs  = sps->pic_width_in_mbs_minus1 + 1,
        .picture_height_in_mbs = sps->pic_height_in_map_units_minus1 + 1,

        .seq_fields.bits = {
            .chroma_format_idc                 = sps->chroma_format_idc,
            .frame_mbs_only_flag               = sps->frame_mbs_only_flag,
            .mb_adaptive_frame_field_flag      = sps->mb_adaptive_frame_field_flag,
            .seq_scaling_matrix_present_flag   = sps->seq_scaling_matrix_present_flag,
            .direct_8x8_inference_flag         = sps->direct_8x8_inference_flag,
            .log2_max_frame_num_minus4         = sps->log2_max_frame_num_minus4,
            .pic_order_cnt_type                = sps->pic_order_cnt_type,
            .log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_pic_order_cnt_lsb_minus4,
            .delta_pic_order_always_zero_flag  = sps->delta_pic_order_always_zero_flag,
        },

        .bit_depth_luma_minus8   = sps->bit_depth_luma_minus8,
        .bit_depth_chroma_minus8 = sps->bit_depth_chroma_minus8,

        .frame_cropping_flag      = sps->frame_cropping_flag,
        .frame_crop_left_offset   = sps->frame_crop_left_offset,
        .frame_crop_right_offset  = sps->frame_crop_right_offset,
        .frame_crop_top_offset    = sps->frame_crop_top_offset,
        .frame_crop_bottom_offset = sps->frame_crop_bottom_offset,

        .vui_parameters_present_flag = sps->vui_parameters_present_flag,

        .vui_fields.bits = {
            .aspect_ratio_info_present_flag = sps->vui.aspect_ratio_info_present_flag,
            .timing_info_present_flag       = sps->vui.timing_info_present_flag,
            .bitstream_restriction_flag     = sps->vui.bitstream_restriction_flag,
            .log2_max_mv_length_horizontal  = sps->vui.log2_max_mv_length_horizontal,
            .log2_max_mv_length_vertical    = sps->vui.log2_max_mv_length_vertical,
        },

        .aspect_ratio_idc  = sps->vui.aspect_ratio_idc,
        .sar_width         = sps->vui.sar_width,
        .sar_height        = sps->vui.sar_height,
        .num_units_in_tick = sps->vui.num_units_in_tick,
        .time_scale        = sps->vui.time_scale,
    };

    *vpic = (VAEncPictureParameterBufferH264) {
        .CurrPic = {
            .picture_id = VA_INVALID_ID,
            .flags      = VA_PICTURE_H264_INVALID,
        },

        .coded_buf = VA_INVALID_ID,

        .pic_parameter_set_id = pps->pic_parameter_set_id,
        .seq_parameter_set_id = pps->seq_parameter_set_id,

        .pic_init_qp                  = pps->pic_init_qp_minus26 + 26,
        .num_ref_idx_l0_active_minus1 = pps->num_ref_idx_l0_default_active_minus1,
        .num_ref_idx_l1_active_minus1 = pps->num_ref_idx_l1_default_active_minus1,

        .chroma_qp_index_offset        = pps->chroma_qp_index_offset,
        .second_chroma_qp_index_offset = pps->second_chroma_qp_index_offset,

        .pic_fields.bits = {
            .entropy_coding_mode_flag        = pps->entropy_coding_mode_flag,
            .weighted_pred_flag              = pps->weighted_pred_flag,
            .weighted_bipred_idc             = pps->weighted_bipred_idc,
            .constrained_intra_pred_flag     = pps->constrained_intra_pred_flag,
            .transform_8x8_mode_flag         = pps->transform_8x8_mode_flag,
            .deblocking_filter_control_present_flag =
                pps->deblocking_filter_control_present_flag,
            .redundant_pic_cnt_present_flag  = pps->redundant_pic_cnt_present_flag,
            .pic_order_present_flag          =
                pps->bottom_field_pic_order_in_frame_present_flag,
            .pic_scaling_matrix_present_flag = pps->pic_scaling_matrix_present_flag,
        },
    };

    return 0;
}

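// Set up per-picture state (frame_num, POC, slice type, pending AUD/SEI)
// and fill the VAAPI picture parameter buffer, including the reference
// frame list.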
static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
                                                 VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options  *opt = ctx->codec_options;
    H264RawSPS              *sps = &priv->sps;
    VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
    int i;

    memset(&priv->current_access_unit, 0,
           sizeof(priv->current_access_unit));

    if (pic->type == PICTURE_TYPE_IDR) {
        av_assert0(pic->display_order == pic->encode_order);
        priv->frame_num      = 0;
        priv->next_frame_num = 1;
        priv->cpb_delay      = 0;
        priv->last_idr_frame = pic->display_order;
        ++priv->idr_pic_count;

        priv->slice_type       = 7;
        priv->primary_pic_type = 0;
    } else {
        priv->frame_num = priv->next_frame_num;

        if (pic->type != PICTURE_TYPE_B) {
            // Reference picture, so frame_num advances.
            priv->next_frame_num = (priv->frame_num + 1) &
                ((1 << (4 + sps->log2_max_frame_num_minus4)) - 1);
        }
        ++priv->cpb_delay;

        if (pic->type == PICTURE_TYPE_I) {
            priv->slice_type       = 7;
            priv->primary_pic_type = 0;
        } else if (pic->type == PICTURE_TYPE_P) {
            priv->slice_type       = 5;
            priv->primary_pic_type = 1;
        } else {
            priv->slice_type       = 6;
            priv->primary_pic_type = 2;
        }
    }
    priv->pic_order_cnt = pic->display_order - priv->last_idr_frame;
    priv->dpb_delay     = pic->display_order - pic->encode_order + 1;

    if (opt->aud) {
        priv->aud_needed = 1;
        priv->aud.nal_unit_header.nal_unit_type = H264_NAL_AUD;
        priv->aud.primary_pic_type = priv->primary_pic_type;
    } else {
        priv->aud_needed = 0;
    }

    if (opt->sei & SEI_IDENTIFIER && pic->encode_order == 0)
        priv->sei_needed = 1;
#if !HAVE_VAAPI_1
    if (ctx->va_rc_mode == VA_RC_CBR)
        priv->sei_cbr_workaround_needed = 1;
#endif

    if (opt->sei & SEI_TIMING) {
        memset(&priv->pic_timing, 0, sizeof(priv->pic_timing));

        priv->pic_timing.cpb_removal_delay = 2 * priv->cpb_delay;
        priv->pic_timing.dpb_output_delay  = 2 * priv->dpb_delay;

        priv->sei_needed = 1;
    }

    if (opt->sei & SEI_RECOVERY_POINT && pic->type == PICTURE_TYPE_I) {
        priv->recovery_point.recovery_frame_cnt = 0;
        priv->recovery_point.exact_match_flag   = 1;
        priv->recovery_point.broken_link_flag   = ctx->b_per_p > 0;

        priv->sei_needed = 1;
    }

    vpic->CurrPic = (VAPictureH264) {
        .picture_id          = pic->recon_surface,
        .frame_idx           = priv->frame_num,
        .flags               = 0,
        .TopFieldOrderCnt    = priv->pic_order_cnt,
        .BottomFieldOrderCnt = priv->pic_order_cnt,
    };

    for (i = 0; i < pic->nb_refs; i++) {
        VAAPIEncodePicture *ref = pic->refs[i];
        unsigned int frame_num = (ref->encode_order - priv->last_idr_frame) &
            ((1 << (4 + sps->log2_max_frame_num_minus4)) - 1);
        unsigned int pic_order_cnt = ref->display_order - priv->last_idr_frame;

        av_assert0(ref && ref->encode_order < pic->encode_order);
        vpic->ReferenceFrames[i] = (VAPictureH264) {
            .picture_id          = ref->recon_surface,
            .frame_idx           = frame_num,
            .flags               = VA_PICTURE_H264_SHORT_TERM_REFERENCE,
            .TopFieldOrderCnt    = pic_order_cnt,
            .BottomFieldOrderCnt = pic_order_cnt,
        };
    }
    for (; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
        vpic->ReferenceFrames[i] = (VAPictureH264) {
            .picture_id = VA_INVALID_ID,
            .flags      = VA_PICTURE_H264_INVALID,
        };
    }

    vpic->coded_buf = pic->output_buffer;

    vpic->frame_num = priv->frame_num;

    vpic->pic_fields.bits.idr_pic_flag       = (pic->type == PICTURE_TYPE_IDR);
    vpic->pic_fields.bits.reference_pic_flag = (pic->type != PICTURE_TYPE_B);

    pic->nb_slices = 1;

    return 0;
}

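// Fill the raw slice header and the VAAPI slice parameter buffer; the
// encoder always uses a single slice covering the whole frame.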
static int vaapi_encode_h264_init_slice_params(AVCodecContext *avctx,
                                               VAAPIEncodePicture *pic,
                                               VAAPIEncodeSlice *slice)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    H264RawSPS              *sps = &priv->sps;
    H264RawPPS              *pps = &priv->pps;
    H264RawSliceHeader       *sh = &priv->slice.header;
    VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264 *vslice = slice->codec_slice_params;
    int i;

    if (pic->type == PICTURE_TYPE_IDR) {
        sh->nal_unit_header.nal_unit_type = H264_NAL_IDR_SLICE;
        sh->nal_unit_header.nal_ref_idc   = 3;
    } else {
        sh->nal_unit_header.nal_unit_type = H264_NAL_SLICE;
        sh->nal_unit_header.nal_ref_idc   = pic->type != PICTURE_TYPE_B;
    }

    // Only one slice per frame.
    sh->first_mb_in_slice = 0;
    sh->slice_type        = priv->slice_type;

    sh->pic_parameter_set_id = pps->pic_parameter_set_id;

    sh->frame_num  = priv->frame_num;
    sh->idr_pic_id = priv->idr_pic_count;

    sh->pic_order_cnt_lsb = priv->pic_order_cnt &
        ((1 << (4 + sps->log2_max_pic_order_cnt_lsb_minus4)) - 1);

    sh->direct_spatial_mv_pred_flag = 1;

    if (pic->type == PICTURE_TYPE_B)
        sh->slice_qp_delta = priv->fixed_qp_b - (pps->pic_init_qp_minus26 + 26);
    else if (pic->type == PICTURE_TYPE_P)
        sh->slice_qp_delta = priv->fixed_qp_p - (pps->pic_init_qp_minus26 + 26);
    else
        sh->slice_qp_delta = priv->fixed_qp_idr - (pps->pic_init_qp_minus26 + 26);

    vslice->macroblock_address = sh->first_mb_in_slice;
    vslice->num_macroblocks   = priv->mb_width * priv->mb_height;

    vslice->macroblock_info = VA_INVALID_ID;

    vslice->slice_type           = sh->slice_type % 5;
    vslice->pic_parameter_set_id = sh->pic_parameter_set_id;
    vslice->idr_pic_id           = sh->idr_pic_id;

    vslice->pic_order_cnt_lsb = sh->pic_order_cnt_lsb;

    vslice->direct_spatial_mv_pred_flag = sh->direct_spatial_mv_pred_flag;

    for (i = 0; i < FF_ARRAY_ELEMS(vslice->RefPicList0); i++) {
        vslice->RefPicList0[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList0[i].flags      = VA_PICTURE_H264_INVALID;
        vslice->RefPicList1[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList1[i].flags      = VA_PICTURE_H264_INVALID;
    }

    av_assert0(pic->nb_refs <= 2);
    if (pic->nb_refs >= 1) {
        // Backward reference for P- or B-frame.
        av_assert0(pic->type == PICTURE_TYPE_P ||
                   pic->type == PICTURE_TYPE_B);
        vslice->RefPicList0[0] = vpic->ReferenceFrames[0];
    }
    if (pic->nb_refs >= 2) {
        // Forward reference for B-frame.
        av_assert0(pic->type == PICTURE_TYPE_B);
        vslice->RefPicList1[0] = vpic->ReferenceFrames[1];
    }

    vslice->slice_qp_delta = sh->slice_qp_delta;

    return 0;
}

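// Per-stream configuration: set up the CBS context, derive the fixed QP
// values used in CQP mode and build the user-data-unregistered identifier
// string.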
static av_cold int vaapi_encode_h264_configure(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options  *opt = ctx->codec_options;
    int err;

    err = ff_cbs_init(&priv->cbc, AV_CODEC_ID_H264, avctx);
    if (err < 0)
        return err;

    priv->mb_width  = FFALIGN(avctx->width,  16) / 16;
    priv->mb_height = FFALIGN(avctx->height, 16) / 16;

    if (ctx->va_rc_mode == VA_RC_CQP) {
        priv->fixed_qp_p = opt->qp;
        if (avctx->i_quant_factor > 0.0)
            priv->fixed_qp_idr = (int)((priv->fixed_qp_p * avctx->i_quant_factor +
                                        avctx->i_quant_offset) + 0.5);
        else
            priv->fixed_qp_idr = priv->fixed_qp_p;
        if (avctx->b_quant_factor > 0.0)
            priv->fixed_qp_b = (int)((priv->fixed_qp_p * avctx->b_quant_factor +
                                      avctx->b_quant_offset) + 0.5);
        else
            priv->fixed_qp_b = priv->fixed_qp_p;

        opt->sei &= ~SEI_TIMING;

        av_log(avctx, AV_LOG_DEBUG, "Using fixed QP = "
               "%d / %d / %d for IDR- / P- / B-frames.\n",
               priv->fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);

    } else if (ctx->va_rc_mode == VA_RC_CBR ||
               ctx->va_rc_mode == VA_RC_VBR) {
        // These still need to be set for pic_init_qp/slice_qp_delta.
        priv->fixed_qp_idr = 26;
        priv->fixed_qp_p   = 26;
        priv->fixed_qp_b   = 26;

        av_log(avctx, AV_LOG_DEBUG, "Using %s-bitrate = %d bps.\n",
               ctx->va_rc_mode == VA_RC_CBR ? "constant" : "variable",
               avctx->bit_rate);

    } else {
        av_assert0(0 && "Invalid RC mode.");
    }

    if (avctx->compression_level == FF_COMPRESSION_DEFAULT)
        avctx->compression_level = opt->quality;

    if (opt->sei & SEI_IDENTIFIER) {
        const char *lavc  = LIBAVCODEC_IDENT;
        const char *vaapi = VA_VERSION_S;
        const char *driver;
        int len;

        memcpy(priv->identifier.uuid_iso_iec_11578,
               vaapi_encode_h264_sei_identifier_uuid,
               sizeof(priv->identifier.uuid_iso_iec_11578));

        driver = vaQueryVendorString(ctx->hwctx->display);
        if (!driver)
            driver = "unknown driver";

        len = snprintf(NULL, 0, "%s / VAAPI %s / %s", lavc, vaapi, driver);
        if (len >= 0) {
            priv->identifier_string = av_malloc(len + 1);
            if (!priv->identifier_string)
                return AVERROR(ENOMEM);

            snprintf(priv->identifier_string, len + 1,
                     "%s / VAAPI %s / %s", lavc, vaapi, driver);

            priv->identifier.data        = priv->identifier_string;
            priv->identifier.data_length = len + 1;
        }
    }

    return 0;
}

static const VAAPIEncodeType vaapi_encode_type_h264 = {
    .priv_data_size        = sizeof(VAAPIEncodeH264Context),

    .configure             = &vaapi_encode_h264_configure,

    .sequence_params_size  = sizeof(VAEncSequenceParameterBufferH264),
    .init_sequence_params  = &vaapi_encode_h264_init_sequence_params,

    .picture_params_size   = sizeof(VAEncPictureParameterBufferH264),
    .init_picture_params   = &vaapi_encode_h264_init_picture_params,

    .slice_params_size     = sizeof(VAEncSliceParameterBufferH264),
    .init_slice_params     = &vaapi_encode_h264_init_slice_params,

    .sequence_header_type  = VAEncPackedHeaderSequence,
    .write_sequence_header = &vaapi_encode_h264_write_sequence_header,

    .slice_header_type     = VAEncPackedHeaderH264_Slice,
    .write_slice_header    = &vaapi_encode_h264_write_slice_header,

    .write_extra_header    = &vaapi_encode_h264_write_extra_header,
};

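// Encoder init: map the requested profile and rate-control settings onto
// VAAPI parameters before handing over to the common VAAPI encode layer.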
static av_cold int vaapi_encode_h264_init(AVCodecContext *avctx)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH264Options *opt =
        (VAAPIEncodeH264Options*)ctx->codec_options_data;

    ctx->codec = &vaapi_encode_type_h264;

    switch (avctx->profile) {
    case FF_PROFILE_H264_BASELINE:
        av_log(avctx, AV_LOG_WARNING, "H.264 baseline profile is not "
               "supported, using constrained baseline profile instead.\n");
        avctx->profile = FF_PROFILE_H264_CONSTRAINED_BASELINE;
    case FF_PROFILE_H264_CONSTRAINED_BASELINE:
        ctx->va_profile = VAProfileH264ConstrainedBaseline;
        break;
    case FF_PROFILE_H264_MAIN:
        ctx->va_profile = VAProfileH264Main;
        break;
    case FF_PROFILE_H264_EXTENDED:
        av_log(avctx, AV_LOG_ERROR, "H.264 extended profile "
               "is not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_UNKNOWN:
    case FF_PROFILE_H264_HIGH:
        ctx->va_profile = VAProfileH264High;
        break;
    case FF_PROFILE_H264_HIGH_10:
    case FF_PROFILE_H264_HIGH_10_INTRA:
        av_log(avctx, AV_LOG_ERROR, "H.264 10-bit profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_H264_HIGH_422:
    case FF_PROFILE_H264_HIGH_422_INTRA:
    case FF_PROFILE_H264_HIGH_444:
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
    case FF_PROFILE_H264_HIGH_444_INTRA:
    case FF_PROFILE_H264_CAVLC_444:
        av_log(avctx, AV_LOG_ERROR, "H.264 non-4:2:0 profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown H.264 profile %d.\n",
               avctx->profile);
        return AVERROR(EINVAL);
    }

    if (opt->low_power) {
#if VA_CHECK_VERSION(0, 39, 2)
        ctx->va_entrypoint = VAEntrypointEncSliceLP;
#else
        av_log(avctx, AV_LOG_ERROR, "Low-power encoding is not "
               "supported with this VAAPI version.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        ctx->va_entrypoint = VAEntrypointEncSlice;
    }

    // Only 8-bit encode is supported.
    ctx->va_rt_format = VA_RT_FORMAT_YUV420;

    if (avctx->bit_rate > 0) {
        if (avctx->rc_max_rate == avctx->bit_rate)
            ctx->va_rc_mode = VA_RC_CBR;
        else
            ctx->va_rc_mode = VA_RC_VBR;
    } else
        ctx->va_rc_mode = VA_RC_CQP;

    ctx->va_packed_headers =
        VA_ENC_PACKED_HEADER_SEQUENCE | // SPS and PPS.
        VA_ENC_PACKED_HEADER_SLICE    | // Slice headers.
        VA_ENC_PACKED_HEADER_MISC;      // SEI.

    ctx->surface_width  = FFALIGN(avctx->width,  16);
    ctx->surface_height = FFALIGN(avctx->height, 16);

    return ff_vaapi_encode_init(avctx);
}

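// Free the CBS context and identifier string, then close the common VAAPI
// encoder.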
static av_cold int vaapi_encode_h264_close(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;

    if (priv) {
        ff_cbs_close(&priv->cbc);
        av_freep(&priv->identifier_string);
    }

    return ff_vaapi_encode_close(avctx);
}

#define OFFSET(x) (offsetof(VAAPIEncodeContext, codec_options_data) + \
                   offsetof(VAAPIEncodeH264Options, x))
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
static const AVOption vaapi_encode_h264_options[] = {
    { "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
      OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 20 }, 0, 52, FLAGS },
    { "quality", "Set encode quality (trades off against speed, higher is faster)",
      OFFSET(quality), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 8, FLAGS },
    { "low_power", "Use low-power encoding mode (experimental: only supported "
      "on some platforms, does not support all features)",
      OFFSET(low_power), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
    { "aud", "Include AUD",
      OFFSET(aud), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },

    { "sei", "Set SEI to include",
      OFFSET(sei), AV_OPT_TYPE_FLAGS,
      { .i64 = SEI_IDENTIFIER | SEI_TIMING | SEI_RECOVERY_POINT },
      0, INT_MAX, FLAGS, "sei" },
    { "identifier", "Include encoder version identifier",
      0, AV_OPT_TYPE_CONST, { .i64 = SEI_IDENTIFIER },
      INT_MIN, INT_MAX, FLAGS, "sei" },
    { "timing", "Include timing parameters (buffering_period and pic_timing)",
      0, AV_OPT_TYPE_CONST, { .i64 = SEI_TIMING },
      INT_MIN, INT_MAX, FLAGS, "sei" },
    { "recovery_point", "Include recovery points where appropriate",
      0, AV_OPT_TYPE_CONST, { .i64 = SEI_RECOVERY_POINT },
      INT_MIN, INT_MAX, FLAGS, "sei" },

    { NULL },
};

static const AVCodecDefault vaapi_encode_h264_defaults[] = {
    { "profile",   "100" },
    { "level",     "51"  },
    { "b",         "0"   },
    { "bf",        "2"   },
    { "g",         "120" },
    { "i_qfactor", "1.0" },
    { "i_qoffset", "0.0" },
    { "b_qfactor", "1.2" },
    { "b_qoffset", "0.0" },
    { "qmin",      "0"   },
    { NULL },
};

static const AVClass vaapi_encode_h264_class = {
    .class_name = "h264_vaapi",
    .item_name  = av_default_item_name,
    .option     = vaapi_encode_h264_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_h264_vaapi_encoder = {
    .name           = "h264_vaapi",
    .long_name      = NULL_IF_CONFIG_SMALL("H.264/AVC (VAAPI)"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_H264,
    .priv_data_size = (sizeof(VAAPIEncodeContext) +
                       sizeof(VAAPIEncodeH264Options)),
    .init           = &vaapi_encode_h264_init,
    .encode2        = &ff_vaapi_encode2,
    .close          = &vaapi_encode_h264_close,
    .priv_class     = &vaapi_encode_h264_class,
    .capabilities   = AV_CODEC_CAP_DELAY,
    .defaults       = vaapi_encode_h264_defaults,
    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE,
    },
};