You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1103 lines
39KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <string.h>
  19. #include <va/va.h>
  20. #include <va/va_enc_h264.h>
  21. #include "libavutil/avassert.h"
  22. #include "libavutil/common.h"
  23. #include "libavutil/internal.h"
  24. #include "libavutil/opt.h"
  25. #include "avcodec.h"
  26. #include "cbs.h"
  27. #include "cbs_h264.h"
  28. #include "h264.h"
  29. #include "h264_sei.h"
  30. #include "internal.h"
  31. #include "vaapi_encode.h"
// Bitmask values for the "sei" user option, selecting which SEI
// messages are inserted into the output stream.
enum {
    SEI_TIMING         = 0x01,
    SEI_IDENTIFIER     = 0x02,
    SEI_RECOVERY_POINT = 0x04,
};
// Random (version 4) ISO 11578 UUID.
// Used as the fixed identifier in the user-data-unregistered SEI
// message that carries the encoder version string.
static const uint8_t vaapi_encode_h264_sei_identifier_uuid[16] = {
    0x59, 0x94, 0x8b, 0x28, 0x11, 0xec, 0x45, 0xaf,
    0x96, 0x75, 0x19, 0xd4, 0x1f, 0xea, 0xa9, 0x4d,
};
// Private context for the VAAPI H.264 encoder.  The embedded
// VAAPIEncodeContext is the first member, so the same priv_data
// pointer is cast to either type (see the paired casts in the
// functions below).
typedef struct VAAPIEncodeH264Context {
    VAAPIEncodeContext common;

    // User options.
    int qp;           // Constant QP used in CQP rate-control mode.
    int quality;      // Encoder quality/speed trade-off value.
    int low_power;    // Nonzero to request the low-power entrypoint.
    int coder;        // Entropy coder selection (0 disables CABAC).
    int aud;          // Nonzero to insert access unit delimiters.
    int sei;          // Bitmask of SEI_* messages to insert.
    int profile;      // Default profile when avctx has none set.
    int level;        // Default level when avctx has none set.

    // Derived settings.
    int mb_width;     // Frame width in 16x16 macroblocks (rounded up).
    int mb_height;    // Frame height in 16x16 macroblocks (rounded up).
    int fixed_qp_idr; // Per-frame-type QPs used for pic_init_qp /
    int fixed_qp_p;   // slice_qp_delta (also set in non-CQP modes).
    int fixed_qp_b;

    // Stream state.
    int frame_num;         // frame_num of the current picture.
    int pic_order_cnt;     // POC of the current picture.
    int next_frame_num;    // frame_num to use for the next reference picture.
    int64_t last_idr_frame;  // Display order of the most recent IDR.
    int64_t idr_pic_count;   // Counter used for idr_pic_id.
    int primary_pic_type;    // Value for the AUD primary_pic_type field.
    int slice_type;          // H.264 slice_type for the current picture.
    int cpb_delay;           // Pictures since the last buffering period.
    int dpb_delay;           // display_order - encode_order + 1.

    // Writer structures.
    CodedBitstreamContext *cbc;              // CBS writer used for packed headers.
    CodedBitstreamFragment current_access_unit;
    H264RawAUD raw_aud;
    H264RawSPS raw_sps;
    H264RawPPS raw_pps;
    H264RawSEI raw_sei;
    H264RawSlice raw_slice;
    H264RawSEIBufferingPeriod sei_buffering_period;
    H264RawSEIPicTiming sei_pic_timing;
    H264RawSEIRecoveryPoint sei_recovery_point;
    H264RawSEIUserDataUnregistered sei_identifier;
    char *sei_identifier_string;  // Owned; freed in close().

    int aud_needed;  // AUD must be prepended to the next written NAL run.
    int sei_needed;  // Bitmask of SEI messages pending for this picture.
    int sei_cbr_workaround_needed;  // Emit empty legacy SEI header (old Intel
                                    // driver CBR bug; pre-VAAPI-1 only).
} VAAPIEncodeH264Context;
  86. static int vaapi_encode_h264_write_access_unit(AVCodecContext *avctx,
  87. char *data, size_t *data_len,
  88. CodedBitstreamFragment *au)
  89. {
  90. VAAPIEncodeH264Context *priv = avctx->priv_data;
  91. int err;
  92. err = ff_cbs_write_fragment_data(priv->cbc, au);
  93. if (err < 0) {
  94. av_log(avctx, AV_LOG_ERROR, "Failed to write packed header.\n");
  95. return err;
  96. }
  97. if (*data_len < 8 * au->data_size - au->data_bit_padding) {
  98. av_log(avctx, AV_LOG_ERROR, "Access unit too large: "
  99. "%zu < %zu.\n", *data_len,
  100. 8 * au->data_size - au->data_bit_padding);
  101. return AVERROR(ENOSPC);
  102. }
  103. memcpy(data, au->data, au->data_size);
  104. *data_len = 8 * au->data_size - au->data_bit_padding;
  105. return 0;
  106. }
  107. static int vaapi_encode_h264_add_nal(AVCodecContext *avctx,
  108. CodedBitstreamFragment *au,
  109. void *nal_unit)
  110. {
  111. VAAPIEncodeH264Context *priv = avctx->priv_data;
  112. H264RawNALUnitHeader *header = nal_unit;
  113. int err;
  114. err = ff_cbs_insert_unit_content(priv->cbc, au, -1,
  115. header->nal_unit_type, nal_unit, NULL);
  116. if (err < 0) {
  117. av_log(avctx, AV_LOG_ERROR, "Failed to add NAL unit: "
  118. "type = %d.\n", header->nal_unit_type);
  119. return err;
  120. }
  121. return 0;
  122. }
  123. static int vaapi_encode_h264_write_sequence_header(AVCodecContext *avctx,
  124. char *data, size_t *data_len)
  125. {
  126. VAAPIEncodeH264Context *priv = avctx->priv_data;
  127. CodedBitstreamFragment *au = &priv->current_access_unit;
  128. int err;
  129. if (priv->aud_needed) {
  130. err = vaapi_encode_h264_add_nal(avctx, au, &priv->raw_aud);
  131. if (err < 0)
  132. goto fail;
  133. priv->aud_needed = 0;
  134. }
  135. err = vaapi_encode_h264_add_nal(avctx, au, &priv->raw_sps);
  136. if (err < 0)
  137. goto fail;
  138. err = vaapi_encode_h264_add_nal(avctx, au, &priv->raw_pps);
  139. if (err < 0)
  140. goto fail;
  141. err = vaapi_encode_h264_write_access_unit(avctx, data, data_len, au);
  142. fail:
  143. ff_cbs_fragment_uninit(priv->cbc, au);
  144. return err;
  145. }
  146. static int vaapi_encode_h264_write_slice_header(AVCodecContext *avctx,
  147. VAAPIEncodePicture *pic,
  148. VAAPIEncodeSlice *slice,
  149. char *data, size_t *data_len)
  150. {
  151. VAAPIEncodeH264Context *priv = avctx->priv_data;
  152. CodedBitstreamFragment *au = &priv->current_access_unit;
  153. int err;
  154. if (priv->aud_needed) {
  155. err = vaapi_encode_h264_add_nal(avctx, au, &priv->raw_aud);
  156. if (err < 0)
  157. goto fail;
  158. priv->aud_needed = 0;
  159. }
  160. err = vaapi_encode_h264_add_nal(avctx, au, &priv->raw_slice);
  161. if (err < 0)
  162. goto fail;
  163. err = vaapi_encode_h264_write_access_unit(avctx, data, data_len, au);
  164. fail:
  165. ff_cbs_fragment_uninit(priv->cbc, au);
  166. return err;
  167. }
// Write any extra packed headers (SEI messages) for the current
// picture.  Called repeatedly with increasing index until it returns
// AVERROR_EOF; each call emits at most one packed header and reports
// its VA packed-header type through *type.
static int vaapi_encode_h264_write_extra_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                int index, int *type,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeH264Context *priv = avctx->priv_data;
    CodedBitstreamFragment *au = &priv->current_access_unit;
    int err, i;

    if (priv->sei_needed) {
        H264RawSEI *sei = &priv->raw_sei;

        // The delimiter, if still pending, must precede the SEI NAL.
        if (priv->aud_needed) {
            err = vaapi_encode_h264_add_nal(avctx, au, &priv->raw_aud);
            if (err < 0)
                goto fail;
            priv->aud_needed = 0;
        }

        *sei = (H264RawSEI) {
            .nal_unit_header = {
                .nal_unit_type = H264_NAL_SEI,
            },
        };

        // Pack the pending messages into the SEI NAL in a fixed order:
        // identifier, buffering period (IDR only), pic timing, recovery point.
        i = 0;
        if (priv->sei_needed & SEI_IDENTIFIER) {
            sei->payload[i].payload_type = H264_SEI_TYPE_USER_DATA_UNREGISTERED;
            sei->payload[i].payload.user_data_unregistered = priv->sei_identifier;
            ++i;
        }
        if (priv->sei_needed & SEI_TIMING) {
            if (pic->type == PICTURE_TYPE_IDR) {
                // Buffering period SEI is only sent at IDR pictures.
                sei->payload[i].payload_type = H264_SEI_TYPE_BUFFERING_PERIOD;
                sei->payload[i].payload.buffering_period = priv->sei_buffering_period;
                ++i;
            }
            sei->payload[i].payload_type = H264_SEI_TYPE_PIC_TIMING;
            sei->payload[i].payload.pic_timing = priv->sei_pic_timing;
            ++i;
        }
        if (priv->sei_needed & SEI_RECOVERY_POINT) {
            sei->payload[i].payload_type = H264_SEI_TYPE_RECOVERY_POINT;
            sei->payload[i].payload.recovery_point = priv->sei_recovery_point;
            ++i;
        }

        sei->payload_count = i;
        av_assert0(sei->payload_count > 0);

        err = vaapi_encode_h264_add_nal(avctx, au, sei);
        if (err < 0)
            goto fail;
        priv->sei_needed = 0;

        err = vaapi_encode_h264_write_access_unit(avctx, data, data_len, au);
        if (err < 0)
            goto fail;

        ff_cbs_fragment_uninit(priv->cbc, au);

        *type = VAEncPackedHeaderRawData;
        return 0;

#if !CONFIG_VAAPI_1
    } else if (priv->sei_cbr_workaround_needed) {
        // Insert a zero-length header using the old SEI type.  This is
        // required to avoid triggering broken behaviour on Intel platforms
        // in CBR mode where an invalid SEI message is generated by the
        // driver and inserted into the stream.
        *data_len = 0;
        *type = VAEncPackedHeaderH264_SEI;
        priv->sei_cbr_workaround_needed = 0;
        return 0;
#endif

    } else {
        // Nothing (further) to emit for this picture.
        return AVERROR_EOF;
    }

fail:
    ff_cbs_fragment_uninit(priv->cbc, au);
    return err;
}
// Fill in the raw SPS/PPS structures (used by the packed-header
// writer) and the corresponding VAAPI sequence and picture parameter
// buffers from the codec context and derived settings.
static int vaapi_encode_h264_init_sequence_params(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = avctx->priv_data;
    H264RawSPS              *sps = &priv->raw_sps;
    H264RawPPS              *pps = &priv->raw_pps;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264  *vpic = ctx->codec_picture_params;

    memset(&priv->current_access_unit, 0,
           sizeof(priv->current_access_unit));

    memset(sps, 0, sizeof(*sps));
    memset(pps, 0, sizeof(*pps));

    sps->nal_unit_header.nal_ref_idc   = 3;
    sps->nal_unit_header.nal_unit_type = H264_NAL_SPS;

    // Low byte of the profile value is the profile_idc; the constraint
    // flags are carried in the upper bits of avctx->profile.
    sps->profile_idc = avctx->profile & 0xff;
    sps->constraint_set1_flag =
        !!(avctx->profile & FF_PROFILE_H264_CONSTRAINED);
    sps->constraint_set3_flag =
        !!(avctx->profile & FF_PROFILE_H264_INTRA);

    sps->level_idc = avctx->level;

    sps->seq_parameter_set_id = 0;
    sps->chroma_format_idc    = 1;  // 4:2:0 only.

    sps->log2_max_frame_num_minus4 = 4;
    sps->pic_order_cnt_type        = 0;
    // Enough POC LSB bits to cover the reordering caused by B-frames.
    sps->log2_max_pic_order_cnt_lsb_minus4 =
        av_clip(av_log2(ctx->b_per_p + 1) - 2, 0, 12);

    // Intra-only profiles keep no reference frames; otherwise one
    // reference, plus one more if B-frames are in use.
    sps->max_num_ref_frames =
        (avctx->profile & FF_PROFILE_H264_INTRA) ? 0 :
        1 + (ctx->b_per_p > 0);

    sps->pic_width_in_mbs_minus1        = priv->mb_width  - 1;
    sps->pic_height_in_map_units_minus1 = priv->mb_height - 1;

    sps->frame_mbs_only_flag       = 1;
    sps->direct_8x8_inference_flag = 1;

    // Crop off the padding added by rounding the frame up to whole
    // macroblocks (offsets are in chroma sample units, hence / 2).
    if (avctx->width  != 16 * priv->mb_width ||
        avctx->height != 16 * priv->mb_height) {
        sps->frame_cropping_flag = 1;

        sps->frame_crop_left_offset   = 0;
        sps->frame_crop_right_offset  =
            (16 * priv->mb_width - avctx->width) / 2;
        sps->frame_crop_top_offset    = 0;
        sps->frame_crop_bottom_offset =
            (16 * priv->mb_height - avctx->height) / 2;
    } else {
        sps->frame_cropping_flag = 0;
    }

    sps->vui_parameters_present_flag = 1;

    if (avctx->sample_aspect_ratio.num != 0 &&
        avctx->sample_aspect_ratio.den != 0) {
        // Table E-1 aspect_ratio_idc values; index 0 is "unspecified".
        static const AVRational sar_idc[] = {
            {   0,  0 },
            {   1,  1 }, {  12, 11 }, {  10, 11 }, {  16, 11 },
            {  40, 33 }, {  24, 11 }, {  20, 11 }, {  32, 11 },
            {  80, 33 }, {  18, 11 }, {  15, 11 }, {  64, 33 },
            { 160, 99 }, {   4,  3 }, {   3,  2 }, {   2,  1 },
        };
        int i;
        for (i = 0; i < FF_ARRAY_ELEMS(sar_idc); i++) {
            if (avctx->sample_aspect_ratio.num == sar_idc[i].num &&
                avctx->sample_aspect_ratio.den == sar_idc[i].den) {
                sps->vui.aspect_ratio_idc = i;
                break;
            }
        }
        if (i >= FF_ARRAY_ELEMS(sar_idc)) {
            // No table match: use Extended_SAR (255) with explicit values.
            sps->vui.aspect_ratio_idc = 255;
            sps->vui.sar_width  = avctx->sample_aspect_ratio.num;
            sps->vui.sar_height = avctx->sample_aspect_ratio.den;
        }
        sps->vui.aspect_ratio_info_present_flag = 1;
    }

    if (avctx->color_range     != AVCOL_RANGE_UNSPECIFIED ||
        avctx->color_primaries != AVCOL_PRI_UNSPECIFIED ||
        avctx->color_trc       != AVCOL_TRC_UNSPECIFIED ||
        avctx->colorspace      != AVCOL_SPC_UNSPECIFIED) {
        sps->vui.video_signal_type_present_flag = 1;
        sps->vui.video_format      = 5; // Unspecified.
        sps->vui.video_full_range_flag =
            avctx->color_range == AVCOL_RANGE_JPEG;

        if (avctx->color_primaries != AVCOL_PRI_UNSPECIFIED ||
            avctx->color_trc       != AVCOL_TRC_UNSPECIFIED ||
            avctx->colorspace      != AVCOL_SPC_UNSPECIFIED) {
            sps->vui.colour_description_present_flag = 1;
            sps->vui.colour_primaries         = avctx->color_primaries;
            sps->vui.transfer_characteristics = avctx->color_trc;
            sps->vui.matrix_coefficients      = avctx->colorspace;
        }
    } else {
        sps->vui.video_format             = 5;
        sps->vui.video_full_range_flag    = 0;
        sps->vui.colour_primaries         = avctx->color_primaries;
        sps->vui.transfer_characteristics = avctx->color_trc;
        sps->vui.matrix_coefficients      = avctx->colorspace;
    }

    if (avctx->chroma_sample_location != AVCHROMA_LOC_UNSPECIFIED) {
        sps->vui.chroma_loc_info_present_flag = 1;
        // AVChromaLocation is offset by one from the H.264 values.
        sps->vui.chroma_sample_loc_type_top_field    =
        sps->vui.chroma_sample_loc_type_bottom_field =
            avctx->chroma_sample_location - 1;
    }

    sps->vui.timing_info_present_flag = 1;
    // time_scale is doubled because H.264 timing counts field intervals.
    if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
        sps->vui.num_units_in_tick     = avctx->framerate.den;
        sps->vui.time_scale            = 2 * avctx->framerate.num;
        sps->vui.fixed_frame_rate_flag = 1;
    } else {
        sps->vui.num_units_in_tick     = avctx->time_base.num;
        sps->vui.time_scale            = 2 * avctx->time_base.den;
        sps->vui.fixed_frame_rate_flag = 0;
    }

    if (priv->sei & SEI_TIMING) {
        H264RawHRD                *hrd = &sps->vui.nal_hrd_parameters;
        H264RawSEIBufferingPeriod *bp  = &priv->sei_buffering_period;

        sps->vui.nal_hrd_parameters_present_flag = 1;

        hrd->cpb_cnt_minus1 = 0;

        // Try to scale these to a sensible range so that the
        // golomb encode of the value is not overlong.
        hrd->bit_rate_scale =
            av_clip_uintp2(av_log2(avctx->bit_rate) - 15 - 6, 4);
        // NOTE: '+' binds tighter than '>>', so this shifts by
        // (bit_rate_scale + 6), matching the H.264 HRD formula.
        hrd->bit_rate_value_minus1[0] =
            (avctx->bit_rate >> hrd->bit_rate_scale + 6) - 1;

        hrd->cpb_size_scale =
            av_clip_uintp2(av_log2(ctx->hrd_params.hrd.buffer_size) - 15 - 4, 4);
        // Same precedence note: shift amount is (cpb_size_scale + 4).
        hrd->cpb_size_value_minus1[0] =
            (ctx->hrd_params.hrd.buffer_size >> hrd->cpb_size_scale + 4) - 1;

        // CBR mode as defined for the HRD cannot be achieved without filler
        // data, so this flag cannot be set even with VAAPI CBR modes.
        hrd->cbr_flag[0] = 0;

        hrd->initial_cpb_removal_delay_length_minus1 = 23;
        hrd->cpb_removal_delay_length_minus1         = 23;
        hrd->dpb_output_delay_length_minus1          = 7;
        hrd->time_offset_length                      = 0;

        bp->seq_parameter_set_id = sps->seq_parameter_set_id;

        // This calculation can easily overflow 32 bits.
        bp->nal.initial_cpb_removal_delay[0] = 90000 *
            (uint64_t)ctx->hrd_params.hrd.initial_buffer_fullness /
            ctx->hrd_params.hrd.buffer_size;
        bp->nal.initial_cpb_removal_delay_offset[0] = 0;
    } else {
        sps->vui.nal_hrd_parameters_present_flag = 0;
        sps->vui.low_delay_hrd_flag = 1 - sps->vui.fixed_frame_rate_flag;
    }

    sps->vui.bitstream_restriction_flag    = 1;
    sps->vui.motion_vectors_over_pic_boundaries_flag = 1;
    sps->vui.log2_max_mv_length_horizontal = 16;
    sps->vui.log2_max_mv_length_vertical   = 16;
    sps->vui.max_num_reorder_frames        = (ctx->b_per_p > 0);
    sps->vui.max_dec_frame_buffering       = sps->max_num_ref_frames;

    pps->nal_unit_header.nal_ref_idc   = 3;
    pps->nal_unit_header.nal_unit_type = H264_NAL_PPS;

    pps->pic_parameter_set_id = 0;
    pps->seq_parameter_set_id = 0;

    // CABAC is unavailable in baseline/extended/CAVLC-444 profiles.
    pps->entropy_coding_mode_flag =
        !(sps->profile_idc == FF_PROFILE_H264_BASELINE ||
          sps->profile_idc == FF_PROFILE_H264_EXTENDED ||
          sps->profile_idc == FF_PROFILE_H264_CAVLC_444);
    // The "coder" option can force CAVLC even when CABAC is allowed.
    if (!priv->coder && pps->entropy_coding_mode_flag)
        pps->entropy_coding_mode_flag = 0;

    pps->num_ref_idx_l0_default_active_minus1 = 0;
    pps->num_ref_idx_l1_default_active_minus1 = 0;

    pps->pic_init_qp_minus26 = priv->fixed_qp_idr - 26;

    if (sps->profile_idc == FF_PROFILE_H264_BASELINE ||
        sps->profile_idc == FF_PROFILE_H264_EXTENDED ||
        sps->profile_idc == FF_PROFILE_H264_MAIN) {
        pps->more_rbsp_data = 0;
    } else {
        // High profiles: enable the 8x8 transform extension data.
        pps->more_rbsp_data = 1;

        pps->transform_8x8_mode_flag = 1;
    }

    // Mirror the raw SPS into the VAAPI sequence parameter buffer.
    *vseq = (VAEncSequenceParameterBufferH264) {
        .seq_parameter_set_id = sps->seq_parameter_set_id,
        .level_idc        = sps->level_idc,
        .intra_period     = avctx->gop_size,
        .intra_idr_period = avctx->gop_size,
        .ip_period        = ctx->b_per_p + 1,

        .bits_per_second       = avctx->bit_rate,
        .max_num_ref_frames    = sps->max_num_ref_frames,
        .picture_width_in_mbs  = sps->pic_width_in_mbs_minus1 + 1,
        .picture_height_in_mbs = sps->pic_height_in_map_units_minus1 + 1,

        .seq_fields.bits = {
            .chroma_format_idc                 = sps->chroma_format_idc,
            .frame_mbs_only_flag               = sps->frame_mbs_only_flag,
            .mb_adaptive_frame_field_flag      = sps->mb_adaptive_frame_field_flag,
            .seq_scaling_matrix_present_flag   = sps->seq_scaling_matrix_present_flag,
            .direct_8x8_inference_flag         = sps->direct_8x8_inference_flag,
            .log2_max_frame_num_minus4         = sps->log2_max_frame_num_minus4,
            .pic_order_cnt_type                = sps->pic_order_cnt_type,
            .log2_max_pic_order_cnt_lsb_minus4 = sps->log2_max_pic_order_cnt_lsb_minus4,
            .delta_pic_order_always_zero_flag  = sps->delta_pic_order_always_zero_flag,
        },

        .bit_depth_luma_minus8   = sps->bit_depth_luma_minus8,
        .bit_depth_chroma_minus8 = sps->bit_depth_chroma_minus8,

        .frame_cropping_flag      = sps->frame_cropping_flag,
        .frame_crop_left_offset   = sps->frame_crop_left_offset,
        .frame_crop_right_offset  = sps->frame_crop_right_offset,
        .frame_crop_top_offset    = sps->frame_crop_top_offset,
        .frame_crop_bottom_offset = sps->frame_crop_bottom_offset,

        .vui_parameters_present_flag = sps->vui_parameters_present_flag,

        .vui_fields.bits = {
            .aspect_ratio_info_present_flag = sps->vui.aspect_ratio_info_present_flag,
            .timing_info_present_flag       = sps->vui.timing_info_present_flag,
            .bitstream_restriction_flag     = sps->vui.bitstream_restriction_flag,
            .log2_max_mv_length_horizontal  = sps->vui.log2_max_mv_length_horizontal,
            .log2_max_mv_length_vertical    = sps->vui.log2_max_mv_length_vertical,
        },

        .aspect_ratio_idc  = sps->vui.aspect_ratio_idc,
        .sar_width         = sps->vui.sar_width,
        .sar_height        = sps->vui.sar_height,
        .num_units_in_tick = sps->vui.num_units_in_tick,
        .time_scale        = sps->vui.time_scale,
    };

    // Mirror the raw PPS into the VAAPI picture parameter buffer.
    // Per-picture fields (CurrPic, coded_buf) are filled later in
    // init_picture_params; they start out invalid.
    *vpic = (VAEncPictureParameterBufferH264) {
        .CurrPic = {
            .picture_id = VA_INVALID_ID,
            .flags      = VA_PICTURE_H264_INVALID,
        },

        .coded_buf = VA_INVALID_ID,

        .pic_parameter_set_id = pps->pic_parameter_set_id,
        .seq_parameter_set_id = pps->seq_parameter_set_id,

        .pic_init_qp                  = pps->pic_init_qp_minus26 + 26,
        .num_ref_idx_l0_active_minus1 = pps->num_ref_idx_l0_default_active_minus1,
        .num_ref_idx_l1_active_minus1 = pps->num_ref_idx_l1_default_active_minus1,

        .chroma_qp_index_offset        = pps->chroma_qp_index_offset,
        .second_chroma_qp_index_offset = pps->second_chroma_qp_index_offset,

        .pic_fields.bits = {
            .entropy_coding_mode_flag = pps->entropy_coding_mode_flag,
            .weighted_pred_flag       = pps->weighted_pred_flag,
            .weighted_bipred_idc      = pps->weighted_bipred_idc,
            .constrained_intra_pred_flag = pps->constrained_intra_pred_flag,
            .transform_8x8_mode_flag  = pps->transform_8x8_mode_flag,
            .deblocking_filter_control_present_flag =
                pps->deblocking_filter_control_present_flag,
            .redundant_pic_cnt_present_flag = pps->redundant_pic_cnt_present_flag,
            .pic_order_present_flag =
                pps->bottom_field_pic_order_in_frame_present_flag,
            .pic_scaling_matrix_present_flag = pps->pic_scaling_matrix_present_flag,
        },
    };

    return 0;
}
  479. static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
  480. VAAPIEncodePicture *pic)
  481. {
  482. VAAPIEncodeContext *ctx = avctx->priv_data;
  483. VAAPIEncodeH264Context *priv = avctx->priv_data;
  484. H264RawSPS *sps = &priv->raw_sps;
  485. VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
  486. int i;
  487. memset(&priv->current_access_unit, 0,
  488. sizeof(priv->current_access_unit));
  489. if (pic->type == PICTURE_TYPE_IDR) {
  490. av_assert0(pic->display_order == pic->encode_order);
  491. priv->frame_num = 0;
  492. priv->next_frame_num = 1;
  493. priv->cpb_delay = 0;
  494. priv->last_idr_frame = pic->display_order;
  495. ++priv->idr_pic_count;
  496. priv->slice_type = 7;
  497. priv->primary_pic_type = 0;
  498. } else {
  499. priv->frame_num = priv->next_frame_num;
  500. if (pic->type != PICTURE_TYPE_B) {
  501. // Reference picture, so frame_num advances.
  502. priv->next_frame_num = (priv->frame_num + 1) &
  503. ((1 << (4 + sps->log2_max_frame_num_minus4)) - 1);
  504. }
  505. ++priv->cpb_delay;
  506. if (pic->type == PICTURE_TYPE_I) {
  507. priv->slice_type = 7;
  508. priv->primary_pic_type = 0;
  509. } else if (pic->type == PICTURE_TYPE_P) {
  510. priv->slice_type = 5;
  511. priv->primary_pic_type = 1;
  512. } else {
  513. priv->slice_type = 6;
  514. priv->primary_pic_type = 2;
  515. }
  516. }
  517. priv->pic_order_cnt = pic->display_order - priv->last_idr_frame;
  518. priv->dpb_delay = pic->display_order - pic->encode_order + 1;
  519. if (priv->aud) {
  520. priv->aud_needed = 1;
  521. priv->raw_aud = (H264RawAUD) {
  522. .nal_unit_header = {
  523. .nal_unit_type = H264_NAL_AUD,
  524. },
  525. .primary_pic_type = priv->primary_pic_type,
  526. };
  527. } else {
  528. priv->aud_needed = 0;
  529. }
  530. priv->sei_needed = 0;
  531. if (priv->sei & SEI_IDENTIFIER && pic->encode_order == 0)
  532. priv->sei_needed |= SEI_IDENTIFIER;
  533. #if !CONFIG_VAAPI_1
  534. if (ctx->va_rc_mode == VA_RC_CBR)
  535. priv->sei_cbr_workaround_needed = 1;
  536. #endif
  537. if (priv->sei & SEI_TIMING) {
  538. priv->sei_pic_timing = (H264RawSEIPicTiming) {
  539. .cpb_removal_delay = 2 * priv->cpb_delay,
  540. .dpb_output_delay = 2 * priv->dpb_delay,
  541. };
  542. priv->sei_needed |= SEI_TIMING;
  543. }
  544. if (priv->sei & SEI_RECOVERY_POINT && pic->type == PICTURE_TYPE_I) {
  545. priv->sei_recovery_point = (H264RawSEIRecoveryPoint) {
  546. .recovery_frame_cnt = 0,
  547. .exact_match_flag = 1,
  548. .broken_link_flag = ctx->b_per_p > 0,
  549. };
  550. priv->sei_needed |= SEI_RECOVERY_POINT;
  551. }
  552. vpic->CurrPic = (VAPictureH264) {
  553. .picture_id = pic->recon_surface,
  554. .frame_idx = priv->frame_num,
  555. .flags = 0,
  556. .TopFieldOrderCnt = priv->pic_order_cnt,
  557. .BottomFieldOrderCnt = priv->pic_order_cnt,
  558. };
  559. for (i = 0; i < pic->nb_refs; i++) {
  560. VAAPIEncodePicture *ref = pic->refs[i];
  561. unsigned int frame_num = (ref->encode_order - priv->last_idr_frame) &
  562. ((1 << (4 + sps->log2_max_frame_num_minus4)) - 1);
  563. unsigned int pic_order_cnt = ref->display_order - priv->last_idr_frame;
  564. av_assert0(ref && ref->encode_order < pic->encode_order);
  565. vpic->ReferenceFrames[i] = (VAPictureH264) {
  566. .picture_id = ref->recon_surface,
  567. .frame_idx = frame_num,
  568. .flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE,
  569. .TopFieldOrderCnt = pic_order_cnt,
  570. .BottomFieldOrderCnt = pic_order_cnt,
  571. };
  572. }
  573. for (; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
  574. vpic->ReferenceFrames[i] = (VAPictureH264) {
  575. .picture_id = VA_INVALID_ID,
  576. .flags = VA_PICTURE_H264_INVALID,
  577. };
  578. }
  579. vpic->coded_buf = pic->output_buffer;
  580. vpic->frame_num = priv->frame_num;
  581. vpic->pic_fields.bits.idr_pic_flag = (pic->type == PICTURE_TYPE_IDR);
  582. vpic->pic_fields.bits.reference_pic_flag = (pic->type != PICTURE_TYPE_B);
  583. pic->nb_slices = 1;
  584. return 0;
  585. }
// Fill in the raw slice header (for the packed writer) and the VAAPI
// slice parameter buffer.  The encoder emits exactly one slice per
// frame, set up from the per-picture state computed in
// init_picture_params.
static int vaapi_encode_h264_init_slice_params(AVCodecContext *avctx,
                                               VAAPIEncodePicture *pic,
                                               VAAPIEncodeSlice *slice)
{
    VAAPIEncodeH264Context *priv = avctx->priv_data;
    H264RawSPS *sps = &priv->raw_sps;
    H264RawPPS *pps = &priv->raw_pps;
    H264RawSliceHeader *sh = &priv->raw_slice.header;
    VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264 *vslice = slice->codec_slice_params;
    int i;

    if (pic->type == PICTURE_TYPE_IDR) {
        sh->nal_unit_header.nal_unit_type = H264_NAL_IDR_SLICE;
        sh->nal_unit_header.nal_ref_idc = 3;
    } else {
        sh->nal_unit_header.nal_unit_type = H264_NAL_SLICE;
        // Only non-B pictures are used as references here.
        sh->nal_unit_header.nal_ref_idc = pic->type != PICTURE_TYPE_B;
    }

    // Only one slice per frame.
    sh->first_mb_in_slice = 0;
    sh->slice_type = priv->slice_type;

    sh->pic_parameter_set_id = pps->pic_parameter_set_id;

    sh->frame_num = priv->frame_num;
    sh->idr_pic_id = priv->idr_pic_count;

    // POC LSB, masked to the bit width signalled in the SPS.
    sh->pic_order_cnt_lsb = priv->pic_order_cnt &
        ((1 << (4 + sps->log2_max_pic_order_cnt_lsb_minus4)) - 1);

    sh->direct_spatial_mv_pred_flag = 1;

    // Express the per-type fixed QP as a delta from pic_init_qp.
    if (pic->type == PICTURE_TYPE_B)
        sh->slice_qp_delta = priv->fixed_qp_b - (pps->pic_init_qp_minus26 + 26);
    else if (pic->type == PICTURE_TYPE_P)
        sh->slice_qp_delta = priv->fixed_qp_p - (pps->pic_init_qp_minus26 + 26);
    else
        sh->slice_qp_delta = priv->fixed_qp_idr - (pps->pic_init_qp_minus26 + 26);

    vslice->macroblock_address = sh->first_mb_in_slice;
    vslice->num_macroblocks = priv->mb_width * priv->mb_height;

    vslice->macroblock_info = VA_INVALID_ID;

    vslice->slice_type = sh->slice_type % 5;
    vslice->pic_parameter_set_id = sh->pic_parameter_set_id;
    vslice->idr_pic_id = sh->idr_pic_id;

    vslice->pic_order_cnt_lsb = sh->pic_order_cnt_lsb;

    vslice->direct_spatial_mv_pred_flag = sh->direct_spatial_mv_pred_flag;

    // Initialise both reference lists to empty.
    for (i = 0; i < FF_ARRAY_ELEMS(vslice->RefPicList0); i++) {
        vslice->RefPicList0[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
        vslice->RefPicList1[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
    }

    av_assert0(pic->nb_refs <= 2);
    if (pic->nb_refs >= 1) {
        // Backward reference for P- or B-frame.
        av_assert0(pic->type == PICTURE_TYPE_P ||
                   pic->type == PICTURE_TYPE_B);
        vslice->RefPicList0[0] = vpic->ReferenceFrames[0];
    }
    if (pic->nb_refs >= 2) {
        // Forward reference for B-frame.
        av_assert0(pic->type == PICTURE_TYPE_B);
        vslice->RefPicList1[0] = vpic->ReferenceFrames[1];
    }

    vslice->slice_qp_delta = sh->slice_qp_delta;

    return 0;
}
// Codec-specific configuration hook: set up the CBS writer, derive
// macroblock dimensions and per-frame-type QPs from the selected
// rate-control mode, and build the identifier SEI string.
// Returns 0 on success or a negative AVERROR code.
static av_cold int vaapi_encode_h264_configure(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = avctx->priv_data;
    int err;

    err = ff_cbs_init(&priv->cbc, AV_CODEC_ID_H264, avctx);
    if (err < 0)
        return err;

    priv->mb_width  = FFALIGN(avctx->width,  16) / 16;
    priv->mb_height = FFALIGN(avctx->height, 16) / 16;

    if (ctx->va_rc_mode == VA_RC_CQP) {
        // Derive IDR/B QPs from the P QP via the usual quant factors.
        priv->fixed_qp_p = priv->qp;
        if (avctx->i_quant_factor > 0.0)
            priv->fixed_qp_idr = (int)((priv->fixed_qp_p * avctx->i_quant_factor +
                                        avctx->i_quant_offset) + 0.5);
        else
            priv->fixed_qp_idr = priv->fixed_qp_p;
        if (avctx->b_quant_factor > 0.0)
            priv->fixed_qp_b = (int)((priv->fixed_qp_p * avctx->b_quant_factor +
                                      avctx->b_quant_offset) + 0.5);
        else
            priv->fixed_qp_b = priv->fixed_qp_p;

        // Timing SEI is meaningless without rate control; drop it.
        priv->sei &= ~SEI_TIMING;

        av_log(avctx, AV_LOG_DEBUG, "Using fixed QP = "
               "%d / %d / %d for IDR- / P- / B-frames.\n",
               priv->fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);

    } else if (ctx->va_rc_mode == VA_RC_CBR ||
               ctx->va_rc_mode == VA_RC_VBR) {
        // These still need to be set for pic_init_qp/slice_qp_delta.
        priv->fixed_qp_idr = 26;
        priv->fixed_qp_p   = 26;
        priv->fixed_qp_b   = 26;

        av_log(avctx, AV_LOG_DEBUG, "Using %s-bitrate = %"PRId64" bps.\n",
               ctx->va_rc_mode == VA_RC_CBR ? "constant" : "variable",
               avctx->bit_rate);

    } else {
        // init() only ever selects CQP/CBR/VBR, so this is unreachable.
        av_assert0(0 && "Invalid RC mode.");
    }

    if (avctx->compression_level == FF_COMPRESSION_DEFAULT)
        avctx->compression_level = priv->quality;

    if (priv->sei & SEI_IDENTIFIER) {
        const char *lavc  = LIBAVCODEC_IDENT;
        const char *vaapi = VA_VERSION_S;
        const char *driver;
        int len;

        memcpy(priv->sei_identifier.uuid_iso_iec_11578,
               vaapi_encode_h264_sei_identifier_uuid,
               sizeof(priv->sei_identifier.uuid_iso_iec_11578));

        driver = vaQueryVendorString(ctx->hwctx->display);
        if (!driver)
            driver = "unknown driver";

        // First pass measures the string, second pass formats it.
        len = snprintf(NULL, 0, "%s / VAAPI %s / %s", lavc, vaapi, driver);
        if (len >= 0) {
            priv->sei_identifier_string = av_malloc(len + 1);
            if (!priv->sei_identifier_string)
                return AVERROR(ENOMEM);

            snprintf(priv->sei_identifier_string, len + 1,
                     "%s / VAAPI %s / %s", lavc, vaapi, driver);

            priv->sei_identifier.data        = priv->sei_identifier_string;
            priv->sei_identifier.data_length = len + 1;
        }
    }

    return 0;
}
// Dispatch table hooking this codec's callbacks into the common
// VAAPI encode framework (ctx->codec is pointed at this in init()).
static const VAAPIEncodeType vaapi_encode_type_h264 = {
    .priv_data_size        = sizeof(VAAPIEncodeH264Context),

    .configure             = &vaapi_encode_h264_configure,

    .sequence_params_size  = sizeof(VAEncSequenceParameterBufferH264),
    .init_sequence_params  = &vaapi_encode_h264_init_sequence_params,

    .picture_params_size   = sizeof(VAEncPictureParameterBufferH264),
    .init_picture_params   = &vaapi_encode_h264_init_picture_params,

    .slice_params_size     = sizeof(VAEncSliceParameterBufferH264),
    .init_slice_params     = &vaapi_encode_h264_init_slice_params,

    .sequence_header_type  = VAEncPackedHeaderSequence,
    .write_sequence_header = &vaapi_encode_h264_write_sequence_header,

    .slice_header_type     = VAEncPackedHeaderH264_Slice,
    .write_slice_header    = &vaapi_encode_h264_write_slice_header,

    .write_extra_header    = &vaapi_encode_h264_write_extra_header,
};
// Encoder init: validate/map the requested profile to a VA profile,
// select the entrypoint and rate-control mode, then hand off to the
// common VAAPI encode initialisation.
static av_cold int vaapi_encode_h264_init(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = avctx->priv_data;

    ctx->codec = &vaapi_encode_type_h264;

    // Fall back to the option-supplied profile/level when unset.
    if (avctx->profile == FF_PROFILE_UNKNOWN)
        avctx->profile = priv->profile;
    if (avctx->level == FF_LEVEL_UNKNOWN)
        avctx->level = priv->level;

    switch (avctx->profile) {
    case FF_PROFILE_H264_BASELINE:
        av_log(avctx, AV_LOG_WARNING, "H.264 baseline profile is not "
               "supported, using constrained baseline profile instead.\n");
        avctx->profile = FF_PROFILE_H264_CONSTRAINED_BASELINE;
        // fallthrough: continue with constrained baseline handling.
    case FF_PROFILE_H264_CONSTRAINED_BASELINE:
        ctx->va_profile = VAProfileH264ConstrainedBaseline;
        if (avctx->max_b_frames != 0) {
            avctx->max_b_frames = 0;
            av_log(avctx, AV_LOG_WARNING, "H.264 constrained baseline profile "
                   "doesn't support encoding with B frames, disabling them.\n");
        }
        break;
    case FF_PROFILE_H264_MAIN:
        ctx->va_profile = VAProfileH264Main;
        break;
    case FF_PROFILE_H264_EXTENDED:
        av_log(avctx, AV_LOG_ERROR, "H.264 extended profile "
               "is not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_UNKNOWN:
    case FF_PROFILE_H264_HIGH:
        ctx->va_profile = VAProfileH264High;
        break;
    case FF_PROFILE_H264_HIGH_10:
    case FF_PROFILE_H264_HIGH_10_INTRA:
        av_log(avctx, AV_LOG_ERROR, "H.264 10-bit profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_H264_HIGH_422:
    case FF_PROFILE_H264_HIGH_422_INTRA:
    case FF_PROFILE_H264_HIGH_444:
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
    case FF_PROFILE_H264_HIGH_444_INTRA:
    case FF_PROFILE_H264_CAVLC_444:
        av_log(avctx, AV_LOG_ERROR, "H.264 non-4:2:0 profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown H.264 profile %d.\n",
               avctx->profile);
        return AVERROR(EINVAL);
    }

    if (priv->low_power) {
#if VA_CHECK_VERSION(0, 39, 2)
        ctx->va_entrypoint = VAEntrypointEncSliceLP;
#else
        av_log(avctx, AV_LOG_ERROR, "Low-power encoding is not "
               "supported with this VAAPI version.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        ctx->va_entrypoint = VAEntrypointEncSlice;
    }

    // Only 8-bit encode is supported.
    ctx->va_rt_format = VA_RT_FORMAT_YUV420;

    // Rate-control mode: a target bitrate selects CBR (when the max
    // rate equals it) or VBR; no bitrate means constant-QP.
    if (avctx->bit_rate > 0) {
        if (avctx->rc_max_rate == avctx->bit_rate)
            ctx->va_rc_mode = VA_RC_CBR;
        else
            ctx->va_rc_mode = VA_RC_VBR;
    } else
        ctx->va_rc_mode = VA_RC_CQP;

    ctx->va_packed_headers =
        VA_ENC_PACKED_HEADER_SEQUENCE | // SPS and PPS.
        VA_ENC_PACKED_HEADER_SLICE    | // Slice headers.
        VA_ENC_PACKED_HEADER_MISC;      // SEI.

    // Surfaces are allocated at whole-macroblock granularity.
    ctx->surface_width  = FFALIGN(avctx->width,  16);
    ctx->surface_height = FFALIGN(avctx->height, 16);

    return ff_vaapi_encode_init(avctx);
}
  807. static av_cold int vaapi_encode_h264_close(AVCodecContext *avctx)
  808. {
  809. VAAPIEncodeH264Context *priv = avctx->priv_data;
  810. ff_cbs_close(&priv->cbc);
  811. av_freep(&priv->sei_identifier_string);
  812. return ff_vaapi_encode_close(avctx);
  813. }
// Helpers for the option table below: field offset into the private context
// and the flag set common to every option.
#define OFFSET(x) offsetof(VAAPIEncodeH264Context, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
// Encoder-private options exposed on the "h264_vaapi" codec.
static const AVOption vaapi_encode_h264_options[] = {
    { "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
      OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 20 }, 0, 52, FLAGS },
    { "quality", "Set encode quality (trades off against speed, higher is faster)",
      OFFSET(quality), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 8, FLAGS },
    { "low_power", "Use low-power encoding mode (experimental: only supported "
      "on some platforms, does not support all features)",
      OFFSET(low_power), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
    // Entropy coder selection; defaults to CABAC.  "vlc"/"ac" are aliases
    // matching the names used by other FFmpeg H.264 encoders.
    { "coder", "Entropy coder type",
      OFFSET(coder), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, FLAGS, "coder" },
    { "cavlc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS, "coder" },
    { "cabac", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, INT_MIN, INT_MAX, FLAGS, "coder" },
    { "vlc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS, "coder" },
    { "ac", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 1 }, INT_MIN, INT_MAX, FLAGS, "coder" },
    { "aud", "Include AUD",
      OFFSET(aud), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
    // Bitmask of SEI_* flags (see the enum at the top of this file);
    // all defined SEI messages are enabled by default.
    { "sei", "Set SEI to include",
      OFFSET(sei), AV_OPT_TYPE_FLAGS,
      { .i64 = SEI_IDENTIFIER | SEI_TIMING | SEI_RECOVERY_POINT },
      0, INT_MAX, FLAGS, "sei" },
    { "identifier", "Include encoder version identifier",
      0, AV_OPT_TYPE_CONST, { .i64 = SEI_IDENTIFIER },
      INT_MIN, INT_MAX, FLAGS, "sei" },
    { "timing", "Include timing parameters (buffering_period and pic_timing)",
      0, AV_OPT_TYPE_CONST, { .i64 = SEI_TIMING },
      INT_MIN, INT_MAX, FLAGS, "sei" },
    { "recovery_point", "Include recovery points where appropriate",
      0, AV_OPT_TYPE_CONST, { .i64 = SEI_RECOVERY_POINT },
      INT_MIN, INT_MAX, FLAGS, "sei" },
    // Only applied when avctx->profile is FF_PROFILE_UNKNOWN (see init).
    { "profile", "Set profile (profile_idc and constraint_set*_flag)",
      OFFSET(profile), AV_OPT_TYPE_INT,
      { .i64 = FF_PROFILE_H264_HIGH }, 0x0000, 0xffff, FLAGS, "profile" },
#define PROFILE(name, value) name, NULL, 0, AV_OPT_TYPE_CONST, \
    { .i64 = value }, 0, 0, FLAGS, "profile"
    { PROFILE("constrained_baseline", FF_PROFILE_H264_CONSTRAINED_BASELINE) },
    { PROFILE("main", FF_PROFILE_H264_MAIN) },
    { PROFILE("high", FF_PROFILE_H264_HIGH) },
#undef PROFILE
    // Values are level_idc (ten times the level number), e.g. "4.1" -> 41.
    { "level", "Set level (level_idc)",
      OFFSET(level), AV_OPT_TYPE_INT,
      { .i64 = 51 }, 0x00, 0xff, FLAGS, "level" },
#define LEVEL(name, value) name, NULL, 0, AV_OPT_TYPE_CONST, \
    { .i64 = value }, 0, 0, FLAGS, "level"
    { LEVEL("1", 10) },
    { LEVEL("1.1", 11) },
    { LEVEL("1.2", 12) },
    { LEVEL("1.3", 13) },
    { LEVEL("2", 20) },
    { LEVEL("2.1", 21) },
    { LEVEL("2.2", 22) },
    { LEVEL("3", 30) },
    { LEVEL("3.1", 31) },
    { LEVEL("3.2", 32) },
    { LEVEL("4", 40) },
    { LEVEL("4.1", 41) },
    { LEVEL("4.2", 42) },
    { LEVEL("5", 50) },
    { LEVEL("5.1", 51) },
    { LEVEL("5.2", 52) },
    { LEVEL("6", 60) },
    { LEVEL("6.1", 61) },
    { LEVEL("6.2", 62) },
#undef LEVEL
    { NULL },
};
// Overrides for generic AVCodecContext defaults.
static const AVCodecDefault vaapi_encode_h264_defaults[] = {
    { "b", "0" },          // No bitrate by default -> init() selects CQP mode.
    { "bf", "2" },         // Up to two consecutive B frames.
    { "g", "120" },        // GOP length.
    { "i_qfactor", "1" },  // I-frame QP equals the base (P-frame) QP...
    { "i_qoffset", "0" },  // ...with no additive offset.
    { "b_qfactor", "6/5" },// B frames quantized slightly coarser than P.
    { "b_qoffset", "0" },
    { "qmin", "0" },
    { NULL },
};
// AVClass wiring the option table above into the private context.
static const AVClass vaapi_encode_h264_class = {
    .class_name = "h264_vaapi",
    .item_name = av_default_item_name,
    .option = vaapi_encode_h264_options,
    .version = LIBAVUTIL_VERSION_INT,
};
// Public codec descriptor for the VAAPI-accelerated H.264 encoder.
AVCodec ff_h264_vaapi_encoder = {
    .name = "h264_vaapi",
    .long_name = NULL_IF_CONFIG_SMALL("H.264/AVC (VAAPI)"),
    .type = AVMEDIA_TYPE_VIDEO,
    .id = AV_CODEC_ID_H264,
    .priv_data_size = sizeof(VAAPIEncodeH264Context),
    .init = &vaapi_encode_h264_init,
    .encode2 = &ff_vaapi_encode2,
    .close = &vaapi_encode_h264_close,
    .priv_class = &vaapi_encode_h264_class,
    // CAP_DELAY: frames may be buffered (B frames) and emitted later.
    .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HARDWARE,
    .defaults = vaapi_encode_h264_defaults,
    // Input must already be VAAPI hardware surfaces.
    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE,
    },
    .wrapper_name = "vaapi",
};