/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <va/va.h>
#include <va/va_enc_h264.h>

#include "libavutil/avassert.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixfmt.h"

#include "avcodec.h"
#include "h264.h"
#include "internal.h"
#include "vaapi_encode.h"
#include "vaapi_encode_h26x.h"

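// Slice types as written into the slice_type syntax element (values 0-4 in
// the H.264 specification; the "all slices same type" variants 5-9 are not
// used here).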
enum {
    SLICE_TYPE_P  = 0,
    SLICE_TYPE_B  = 1,
    SLICE_TYPE_I  = 2,
    SLICE_TYPE_SP = 3,
    SLICE_TYPE_SI = 4,
};
// This structure contains all possibly-useful per-sequence syntax elements
// which are not already contained in the various VAAPI structures.
typedef struct VAAPIEncodeH264MiscSequenceParams {
    unsigned int profile_idc;
    char constraint_set0_flag;
    char constraint_set1_flag;
    char constraint_set2_flag;
    char constraint_set3_flag;
    char constraint_set4_flag;
    char constraint_set5_flag;

    char separate_colour_plane_flag;
    char qpprime_y_zero_transform_bypass_flag;

    char gaps_in_frame_num_allowed_flag;
    char delta_pic_order_always_zero_flag;
    char bottom_field_pic_order_in_frame_present_flag;

    unsigned int num_slice_groups_minus1;
    unsigned int slice_group_map_type;

    int pic_init_qs_minus26;

    char overscan_info_present_flag;
    char overscan_appropriate_flag;

    char video_signal_type_present_flag;
    unsigned int video_format;
    char video_full_range_flag;
    char colour_description_present_flag;
    unsigned int colour_primaries;
    unsigned int transfer_characteristics;
    unsigned int matrix_coefficients;

    char chroma_loc_info_present_flag;
    unsigned int chroma_sample_loc_type_top_field;
    unsigned int chroma_sample_loc_type_bottom_field;

    // Some timing elements are in VAEncSequenceParameterBufferH264.
    char fixed_frame_rate_flag;

    char nal_hrd_parameters_present_flag;
    char vcl_hrd_parameters_present_flag;
    char low_delay_hrd_flag;
    char pic_struct_present_flag;

    char bitstream_restriction_flag;
} VAAPIEncodeH264MiscSequenceParams;
// This structure contains all possibly-useful per-slice syntax elements
// which are not already contained in the various VAAPI structures.
typedef struct VAAPIEncodeH264MiscSliceParams {
    unsigned int nal_unit_type;
    unsigned int nal_ref_idc;

    unsigned int colour_plane_id;
    char field_pic_flag;
    char bottom_field_flag;
    unsigned int redundant_pic_cnt;
    char sp_for_switch_flag;
    int slice_qs_delta;

    char ref_pic_list_modification_flag_l0;
    char ref_pic_list_modification_flag_l1;

    char no_output_of_prior_pics_flag;
    char long_term_reference_flag;
    char adaptive_ref_pic_marking_mode_flag;
} VAAPIEncodeH264MiscSliceParams;
typedef struct VAAPIEncodeH264Slice {
    VAAPIEncodeH264MiscSliceParams misc_slice_params;
} VAAPIEncodeH264Slice;

typedef struct VAAPIEncodeH264Context {
    VAAPIEncodeH264MiscSequenceParams misc_sequence_params;

    int mb_width;
    int mb_height;

    int fixed_qp_idr;
    int fixed_qp_p;
    int fixed_qp_b;

    int next_frame_num;
    int64_t idr_pic_count;

    // Rate control configuration.
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterRateControl rc;
    } rc_params;
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterHRD hrd;
    } hrd_params;

#if VA_CHECK_VERSION(0, 36, 0)
    // Speed-quality tradeoff setting.
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterBufferQualityLevel quality;
    } quality_params;
#endif
} VAAPIEncodeH264Context;

typedef struct VAAPIEncodeH264Options {
    int qp;
    int quality;
    int low_power;
} VAAPIEncodeH264Options;

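// These helpers expand a struct member into the (value, syntax_element_name)
// pair expected by the u()/ue()/se() bitstream-writing macros (see
// vaapi_encode_h26x.h); for example, u(8, vseq_var(level_idc)) writes
// vseq->level_idc as an 8-bit field named level_idc.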
#define vseq_var(name)     vseq->name, name
#define vseq_field(name)   vseq->seq_fields.bits.name, name
#define vvui_field(name)   vseq->vui_fields.bits.name, name
#define vpic_var(name)     vpic->name, name
#define vpic_field(name)   vpic->pic_fields.bits.name, name
#define vslice_var(name)   vslice->name, name
#define vslice_field(name) vslice->slice_fields.bits.name, name
#define mseq_var(name)     mseq->name, name
#define mslice_var(name)   mslice->name, name
static void vaapi_encode_h264_write_nal_header(PutBitContext *pbc,
                                               int nal_unit_type, int nal_ref_idc)
{
    u(1, 0, forbidden_zero_bit);
    u(2, nal_ref_idc, nal_ref_idc);
    u(5, nal_unit_type, nal_unit_type);
}

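// Terminate an RBSP: a single stop bit followed by zero bits up to the next
// byte boundary (rbsp_trailing_bits() in the specification).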
static void vaapi_encode_h264_write_trailing_rbsp(PutBitContext *pbc)
{
    u(1, 1, rbsp_stop_one_bit);
    while (put_bits_count(pbc) & 7)
        u(1, 0, rbsp_alignment_zero_bit);
}
static void vaapi_encode_h264_write_vui(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;

    u(1, vvui_field(aspect_ratio_info_present_flag));
    if (vseq->vui_fields.bits.aspect_ratio_info_present_flag) {
        u(8, vseq_var(aspect_ratio_idc));
        if (vseq->aspect_ratio_idc == 255) {
            u(16, vseq_var(sar_width));
            u(16, vseq_var(sar_height));
        }
    }

    u(1, mseq_var(overscan_info_present_flag));
    if (mseq->overscan_info_present_flag)
        u(1, mseq_var(overscan_appropriate_flag));

    u(1, mseq_var(video_signal_type_present_flag));
    if (mseq->video_signal_type_present_flag) {
        u(3, mseq_var(video_format));
        u(1, mseq_var(video_full_range_flag));
        u(1, mseq_var(colour_description_present_flag));
        if (mseq->colour_description_present_flag) {
            u(8, mseq_var(colour_primaries));
            u(8, mseq_var(transfer_characteristics));
            u(8, mseq_var(matrix_coefficients));
        }
    }

    u(1, mseq_var(chroma_loc_info_present_flag));
    if (mseq->chroma_loc_info_present_flag) {
        ue(mseq_var(chroma_sample_loc_type_top_field));
        ue(mseq_var(chroma_sample_loc_type_bottom_field));
    }

    u(1, vvui_field(timing_info_present_flag));
    if (vseq->vui_fields.bits.timing_info_present_flag) {
        u(32, vseq_var(num_units_in_tick));
        u(32, vseq_var(time_scale));
        u(1, mseq_var(fixed_frame_rate_flag));
    }

    u(1, mseq_var(nal_hrd_parameters_present_flag));
    if (mseq->nal_hrd_parameters_present_flag) {
        av_assert0(0 && "nal hrd parameters not supported");
    }
    u(1, mseq_var(vcl_hrd_parameters_present_flag));
    if (mseq->vcl_hrd_parameters_present_flag) {
        av_assert0(0 && "vcl hrd parameters not supported");
    }
    if (mseq->nal_hrd_parameters_present_flag ||
        mseq->vcl_hrd_parameters_present_flag)
        u(1, mseq_var(low_delay_hrd_flag));

    u(1, mseq_var(pic_struct_present_flag));

    u(1, vvui_field(bitstream_restriction_flag));
    if (vseq->vui_fields.bits.bitstream_restriction_flag) {
        av_assert0(0 && "bitstream restrictions not supported");
    }
}
static void vaapi_encode_h264_write_sps(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    int i;

    vaapi_encode_h264_write_nal_header(pbc, NAL_SPS, 3);

    u(8, mseq_var(profile_idc));
    u(1, mseq_var(constraint_set0_flag));
    u(1, mseq_var(constraint_set1_flag));
    u(1, mseq_var(constraint_set2_flag));
    u(1, mseq_var(constraint_set3_flag));
    u(1, mseq_var(constraint_set4_flag));
    u(1, mseq_var(constraint_set5_flag));
    u(2, 0, reserved_zero_2bits);
    u(8, vseq_var(level_idc));

    ue(vseq_var(seq_parameter_set_id));

    if (mseq->profile_idc == 100 || mseq->profile_idc == 110 ||
        mseq->profile_idc == 122 || mseq->profile_idc == 244 ||
        mseq->profile_idc ==  44 || mseq->profile_idc ==  83 ||
        mseq->profile_idc ==  86 || mseq->profile_idc == 118 ||
        mseq->profile_idc == 128 || mseq->profile_idc == 138) {
        ue(vseq_field(chroma_format_idc));
        if (vseq->seq_fields.bits.chroma_format_idc == 3)
            u(1, mseq_var(separate_colour_plane_flag));

        ue(vseq_var(bit_depth_luma_minus8));
        ue(vseq_var(bit_depth_chroma_minus8));

        u(1, mseq_var(qpprime_y_zero_transform_bypass_flag));

        u(1, vseq_field(seq_scaling_matrix_present_flag));
        if (vseq->seq_fields.bits.seq_scaling_matrix_present_flag) {
            av_assert0(0 && "scaling matrices not supported");
        }
    }

    ue(vseq_field(log2_max_frame_num_minus4));
    ue(vseq_field(pic_order_cnt_type));

    if (vseq->seq_fields.bits.pic_order_cnt_type == 0) {
        ue(vseq_field(log2_max_pic_order_cnt_lsb_minus4));
    } else if (vseq->seq_fields.bits.pic_order_cnt_type == 1) {
        u(1, mseq_var(delta_pic_order_always_zero_flag));
        se(vseq_var(offset_for_non_ref_pic));
        se(vseq_var(offset_for_top_to_bottom_field));
        ue(vseq_var(num_ref_frames_in_pic_order_cnt_cycle));

        for (i = 0; i < vseq->num_ref_frames_in_pic_order_cnt_cycle; i++)
            se(vseq_var(offset_for_ref_frame[i]));
    }

    ue(vseq_var(max_num_ref_frames));
    u(1, mseq_var(gaps_in_frame_num_allowed_flag));

    ue(vseq->picture_width_in_mbs  - 1, pic_width_in_mbs_minus1);
    ue(vseq->picture_height_in_mbs - 1, pic_height_in_mbs_minus1);

    u(1, vseq_field(frame_mbs_only_flag));
    if (!vseq->seq_fields.bits.frame_mbs_only_flag)
        u(1, vseq_field(mb_adaptive_frame_field_flag));

    u(1, vseq_field(direct_8x8_inference_flag));

    u(1, vseq_var(frame_cropping_flag));
    if (vseq->frame_cropping_flag) {
        ue(vseq_var(frame_crop_left_offset));
        ue(vseq_var(frame_crop_right_offset));
        ue(vseq_var(frame_crop_top_offset));
        ue(vseq_var(frame_crop_bottom_offset));
    }

    u(1, vseq_var(vui_parameters_present_flag));
    if (vseq->vui_parameters_present_flag)
        vaapi_encode_h264_write_vui(pbc, ctx);

    vaapi_encode_h264_write_trailing_rbsp(pbc);
}
static void vaapi_encode_h264_write_pps(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncPictureParameterBufferH264   *vpic = ctx->codec_picture_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;

    vaapi_encode_h264_write_nal_header(pbc, NAL_PPS, 3);

    ue(vpic_var(pic_parameter_set_id));
    ue(vpic_var(seq_parameter_set_id));

    u(1, vpic_field(entropy_coding_mode_flag));
    u(1, mseq_var(bottom_field_pic_order_in_frame_present_flag));

    ue(mseq_var(num_slice_groups_minus1));
    if (mseq->num_slice_groups_minus1 > 0) {
        ue(mseq_var(slice_group_map_type));
        av_assert0(0 && "slice groups not supported");
    }

    ue(vpic_var(num_ref_idx_l0_active_minus1));
    ue(vpic_var(num_ref_idx_l1_active_minus1));

    u(1, vpic_field(weighted_pred_flag));
    u(2, vpic_field(weighted_bipred_idc));

    se(vpic->pic_init_qp - 26, pic_init_qp_minus26);
    se(mseq_var(pic_init_qs_minus26));
    se(vpic_var(chroma_qp_index_offset));

    u(1, vpic_field(deblocking_filter_control_present_flag));
    u(1, vpic_field(constrained_intra_pred_flag));
    u(1, vpic_field(redundant_pic_cnt_present_flag));

    u(1, vpic_field(transform_8x8_mode_flag));

    u(1, vpic_field(pic_scaling_matrix_present_flag));
    if (vpic->pic_fields.bits.pic_scaling_matrix_present_flag) {
        av_assert0(0 && "scaling matrices not supported");
    }

    se(vpic_var(second_chroma_qp_index_offset));

    vaapi_encode_h264_write_trailing_rbsp(pbc);
}
static void vaapi_encode_h264_write_slice_header2(PutBitContext *pbc,
                                                  VAAPIEncodeContext *ctx,
                                                  VAAPIEncodePicture *pic,
                                                  VAAPIEncodeSlice *slice)
{
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264   *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264   *vslice = slice->codec_slice_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    VAAPIEncodeH264Slice            *pslice = slice->priv_data;
    VAAPIEncodeH264MiscSliceParams  *mslice = &pslice->misc_slice_params;

    vaapi_encode_h264_write_nal_header(pbc, mslice->nal_unit_type,
                                       mslice->nal_ref_idc);

    ue(vslice->macroblock_address, first_mb_in_slice);
    ue(vslice_var(slice_type));
    ue(vpic_var(pic_parameter_set_id));

    if (mseq->separate_colour_plane_flag) {
        u(2, mslice_var(colour_plane_id));
    }

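    // frame_num is a fixed-width field of 4 + log2_max_frame_num_minus4 bits,
    // so the picture's frame_num is written modulo MaxFrameNum.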
    u(4 + vseq->seq_fields.bits.log2_max_frame_num_minus4,
      (vpic->frame_num &
       ((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1)),
      frame_num);

    if (!vseq->seq_fields.bits.frame_mbs_only_flag) {
        u(1, mslice_var(field_pic_flag));
        if (mslice->field_pic_flag)
            u(1, mslice_var(bottom_field_flag));
    }

    if (vpic->pic_fields.bits.idr_pic_flag) {
        ue(vslice_var(idr_pic_id));
    }

    if (vseq->seq_fields.bits.pic_order_cnt_type == 0) {
        u(4 + vseq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4,
          vslice_var(pic_order_cnt_lsb));
        if (mseq->bottom_field_pic_order_in_frame_present_flag &&
            !mslice->field_pic_flag) {
            se(vslice_var(delta_pic_order_cnt_bottom));
        }
    }

    if (vseq->seq_fields.bits.pic_order_cnt_type == 1 &&
        !vseq->seq_fields.bits.delta_pic_order_always_zero_flag) {
        se(vslice_var(delta_pic_order_cnt[0]));
        if (mseq->bottom_field_pic_order_in_frame_present_flag &&
            !mslice->field_pic_flag) {
            se(vslice_var(delta_pic_order_cnt[1]));
        }
    }

    if (vpic->pic_fields.bits.redundant_pic_cnt_present_flag) {
        ue(mslice_var(redundant_pic_cnt));
    }

    if (vslice->slice_type == SLICE_TYPE_B) {
        u(1, vslice_var(direct_spatial_mv_pred_flag));
    }

    if (vslice->slice_type == SLICE_TYPE_P ||
        vslice->slice_type == SLICE_TYPE_SP ||
        vslice->slice_type == SLICE_TYPE_B) {
        u(1, vslice_var(num_ref_idx_active_override_flag));
        if (vslice->num_ref_idx_active_override_flag) {
            ue(vslice_var(num_ref_idx_l0_active_minus1));
            if (vslice->slice_type == SLICE_TYPE_B)
                ue(vslice_var(num_ref_idx_l1_active_minus1));
        }
    }

    if (mslice->nal_unit_type == 20 || mslice->nal_unit_type == 21) {
        av_assert0(0 && "no MVC support");
    } else {
        if (vslice->slice_type % 5 != 2 && vslice->slice_type % 5 != 4) {
            u(1, mslice_var(ref_pic_list_modification_flag_l0));
            if (mslice->ref_pic_list_modification_flag_l0) {
                av_assert0(0 && "ref pic list modification");
            }
        }
        if (vslice->slice_type % 5 == 1) {
            u(1, mslice_var(ref_pic_list_modification_flag_l1));
            if (mslice->ref_pic_list_modification_flag_l1) {
                av_assert0(0 && "ref pic list modification");
            }
        }
    }

    if ((vpic->pic_fields.bits.weighted_pred_flag &&
         (vslice->slice_type == SLICE_TYPE_P ||
          vslice->slice_type == SLICE_TYPE_SP)) ||
        (vpic->pic_fields.bits.weighted_bipred_idc == 1 &&
         vslice->slice_type == SLICE_TYPE_B)) {
        av_assert0(0 && "prediction weights not supported");
    }

    av_assert0(mslice->nal_ref_idc > 0 ==
               vpic->pic_fields.bits.reference_pic_flag);
    if (mslice->nal_ref_idc != 0) {
        if (vpic->pic_fields.bits.idr_pic_flag) {
            u(1, mslice_var(no_output_of_prior_pics_flag));
            u(1, mslice_var(long_term_reference_flag));
        } else {
            u(1, mslice_var(adaptive_ref_pic_marking_mode_flag));
            if (mslice->adaptive_ref_pic_marking_mode_flag) {
                av_assert0(0 && "MMCOs not supported");
            }
        }
    }

    if (vpic->pic_fields.bits.entropy_coding_mode_flag &&
        vslice->slice_type != SLICE_TYPE_I &&
        vslice->slice_type != SLICE_TYPE_SI) {
        ue(vslice_var(cabac_init_idc));
    }

    se(vslice_var(slice_qp_delta));

    if (vslice->slice_type == SLICE_TYPE_SP ||
        vslice->slice_type == SLICE_TYPE_SI) {
        if (vslice->slice_type == SLICE_TYPE_SP)
            u(1, mslice_var(sp_for_switch_flag));
        se(mslice_var(slice_qs_delta));
    }

    if (vpic->pic_fields.bits.deblocking_filter_control_present_flag) {
        ue(vslice_var(disable_deblocking_filter_idc));
        if (vslice->disable_deblocking_filter_idc != 1) {
            se(vslice_var(slice_alpha_c0_offset_div2));
            se(vslice_var(slice_beta_offset_div2));
        }
    }

    if (mseq->num_slice_groups_minus1 > 0 &&
        mseq->slice_group_map_type >= 3 && mseq->slice_group_map_type <= 5) {
        av_assert0(0 && "slice groups not supported");
    }

    // No alignment - this need not be a byte boundary.
}

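// Write the packed sequence header: the SPS and then the PPS are each
// assembled in a temporary buffer and converted to Annex B byte-stream form
// (start code plus emulation prevention) by
// ff_vaapi_encode_h26x_nal_unit_to_byte_stream, concatenated into the single
// output buffer supplied by the caller.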
static int vaapi_encode_h264_write_sequence_header(AVCodecContext *avctx,
                                                   char *data, size_t *data_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    PutBitContext pbc;
    char tmp[256];
    int err;
    size_t nal_len, bit_len, bit_pos, next_len;

    bit_len = *data_len;
    bit_pos = 0;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_sps(&pbc, ctx);
    nal_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    next_len = bit_len - bit_pos;
    err = ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data + bit_pos / 8,
                                                       &next_len,
                                                       tmp, nal_len);
    if (err < 0)
        return err;
    bit_pos += next_len;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_pps(&pbc, ctx);
    nal_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    next_len = bit_len - bit_pos;
    err = ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data + bit_pos / 8,
                                                       &next_len,
                                                       tmp, nal_len);
    if (err < 0)
        return err;
    bit_pos += next_len;

    *data_len = bit_pos;
    return 0;
}
static int vaapi_encode_h264_write_slice_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                VAAPIEncodeSlice *slice,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    PutBitContext pbc;
    char tmp[256];
    size_t header_len;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_slice_header2(&pbc, ctx, pic, slice);
    header_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    return ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data, data_len,
                                                        tmp, header_len);
}
static int vaapi_encode_h264_init_sequence_params(AVCodecContext *avctx)
{
    VAAPIEncodeContext                 *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264   *vpic = ctx->codec_picture_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    int i;

    {
        vseq->seq_parameter_set_id = 0;

        vseq->level_idc = avctx->level;

        vseq->max_num_ref_frames = 2;

        vseq->picture_width_in_mbs  = priv->mb_width;
        vseq->picture_height_in_mbs = priv->mb_height;

        vseq->seq_fields.bits.chroma_format_idc         = 1;
        vseq->seq_fields.bits.frame_mbs_only_flag       = 1;
        vseq->seq_fields.bits.direct_8x8_inference_flag = 1;
        vseq->seq_fields.bits.log2_max_frame_num_minus4 = 4;
        vseq->seq_fields.bits.pic_order_cnt_type        = 0;

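        // Crop offsets are expressed in crop units, which for 4:2:0 content
        // are two luma samples in each direction; hence the division by two
        // when converting from the macroblock-aligned size in pixels.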
        if (ctx->input_width  != ctx->aligned_width ||
            ctx->input_height != ctx->aligned_height) {
            vseq->frame_cropping_flag = 1;

            vseq->frame_crop_left_offset   = 0;
            vseq->frame_crop_right_offset  =
                (ctx->aligned_width - ctx->input_width) / 2;
            vseq->frame_crop_top_offset    = 0;
            vseq->frame_crop_bottom_offset =
                (ctx->aligned_height - ctx->input_height) / 2;
        } else {
            vseq->frame_cropping_flag = 0;
        }

        vseq->vui_parameters_present_flag = 1;
        if (avctx->sample_aspect_ratio.num != 0) {
            vseq->vui_fields.bits.aspect_ratio_info_present_flag = 1;
            // There is a large enum of these which we could support
            // individually rather than using the generic X/Y form?
            if (avctx->sample_aspect_ratio.num ==
                avctx->sample_aspect_ratio.den) {
                vseq->aspect_ratio_idc = 1;
            } else {
                vseq->aspect_ratio_idc = 255; // Extended SAR.
                vseq->sar_width  = avctx->sample_aspect_ratio.num;
                vseq->sar_height = avctx->sample_aspect_ratio.den;
            }
        }

        if (avctx->color_primaries != AVCOL_PRI_UNSPECIFIED ||
            avctx->color_trc       != AVCOL_TRC_UNSPECIFIED ||
            avctx->colorspace      != AVCOL_SPC_UNSPECIFIED) {
            mseq->video_signal_type_present_flag = 1;
            mseq->video_format                   = 5; // Unspecified.
            mseq->video_full_range_flag          = 0;
            mseq->colour_description_present_flag = 1;
            // These enums are derived from the standard and hence
            // we can just use the values directly.
            mseq->colour_primaries         = avctx->color_primaries;
            mseq->transfer_characteristics = avctx->color_trc;
            mseq->matrix_coefficients      = avctx->colorspace;
        }

        vseq->bits_per_second = avctx->bit_rate;

        vseq->vui_fields.bits.timing_info_present_flag = 1;
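        // VUI timing counts a frame as two field ticks, so the frame rate
        // is time_scale / (2 * num_units_in_tick).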
        if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
            vseq->num_units_in_tick = avctx->framerate.den;
            vseq->time_scale        = 2 * avctx->framerate.num;
            mseq->fixed_frame_rate_flag = 1;
        } else {
            vseq->num_units_in_tick = avctx->time_base.num;
            vseq->time_scale        = 2 * avctx->time_base.den;
            mseq->fixed_frame_rate_flag = 0;
        }

        vseq->intra_period     = ctx->p_per_i * (ctx->b_per_p + 1);
        vseq->intra_idr_period = vseq->intra_period;
        vseq->ip_period        = ctx->b_per_p + 1;
    }

    {
        vpic->CurrPic.picture_id = VA_INVALID_ID;
        vpic->CurrPic.flags      = VA_PICTURE_H264_INVALID;

        for (i = 0; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
            vpic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
            vpic->ReferenceFrames[i].flags      = VA_PICTURE_H264_INVALID;
        }

        vpic->coded_buf = VA_INVALID_ID;

        vpic->pic_parameter_set_id = 0;
        vpic->seq_parameter_set_id = 0;

        vpic->num_ref_idx_l0_active_minus1 = 0;
        vpic->num_ref_idx_l1_active_minus1 = 0;

        vpic->pic_fields.bits.entropy_coding_mode_flag =
            ((avctx->profile & 0xff) != 66);
        vpic->pic_fields.bits.weighted_pred_flag  = 0;
        vpic->pic_fields.bits.weighted_bipred_idc = 0;
        vpic->pic_fields.bits.transform_8x8_mode_flag =
            ((avctx->profile & 0xff) >= 100);

        vpic->pic_init_qp = priv->fixed_qp_idr;
    }

    {
        mseq->profile_idc = avctx->profile & 0xff;

        if (avctx->profile & FF_PROFILE_H264_CONSTRAINED)
            mseq->constraint_set1_flag = 1;
        if (avctx->profile & FF_PROFILE_H264_INTRA)
            mseq->constraint_set3_flag = 1;
    }

    return 0;
}

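// frame_num advances only after coding a reference picture (nal_ref_idc != 0);
// B-frames are never used as references here, so they do not advance it.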
static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
                                                 VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext                *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264  *vpic = pic->codec_picture_params;
    VAAPIEncodeH264Context           *priv = ctx->priv_data;
    int i;

    if (pic->type == PICTURE_TYPE_IDR) {
        av_assert0(pic->display_order == pic->encode_order);
        vpic->frame_num = 0;
        priv->next_frame_num = 1;
    } else {
        vpic->frame_num = priv->next_frame_num;
        if (pic->type != PICTURE_TYPE_B) {
            // nal_ref_idc != 0
            ++priv->next_frame_num;
        }
    }

    vpic->frame_num = vpic->frame_num &
        ((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1);

    vpic->CurrPic.picture_id          = pic->recon_surface;
    vpic->CurrPic.frame_idx           = vpic->frame_num;
    vpic->CurrPic.flags               = 0;
    vpic->CurrPic.TopFieldOrderCnt    = pic->display_order;
    vpic->CurrPic.BottomFieldOrderCnt = pic->display_order;

    for (i = 0; i < pic->nb_refs; i++) {
        VAAPIEncodePicture *ref = pic->refs[i];
        av_assert0(ref && ref->encode_order < pic->encode_order);
        vpic->ReferenceFrames[i].picture_id = ref->recon_surface;
        vpic->ReferenceFrames[i].frame_idx  = ref->encode_order;
        vpic->ReferenceFrames[i].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        vpic->ReferenceFrames[i].TopFieldOrderCnt    = ref->display_order;
        vpic->ReferenceFrames[i].BottomFieldOrderCnt = ref->display_order;
    }
    for (; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
        vpic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
        vpic->ReferenceFrames[i].flags      = VA_PICTURE_H264_INVALID;
    }

    vpic->coded_buf = pic->output_buffer;

    vpic->pic_fields.bits.idr_pic_flag       = (pic->type == PICTURE_TYPE_IDR);
    vpic->pic_fields.bits.reference_pic_flag = (pic->type != PICTURE_TYPE_B);

    pic->nb_slices = 1;

    return 0;
}
static int vaapi_encode_h264_init_slice_params(AVCodecContext *avctx,
                                               VAAPIEncodePicture *pic,
                                               VAAPIEncodeSlice *slice)
{
    VAAPIEncodeContext                 *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264   *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264   *vslice = slice->codec_slice_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264Slice            *pslice;
    VAAPIEncodeH264MiscSliceParams  *mslice;
    int i;

    slice->priv_data = av_mallocz(sizeof(*pslice));
    if (!slice->priv_data)
        return AVERROR(ENOMEM);
    pslice = slice->priv_data;
    mslice = &pslice->misc_slice_params;

    if (pic->type == PICTURE_TYPE_IDR)
        mslice->nal_unit_type = NAL_IDR_SLICE;
    else
        mslice->nal_unit_type = NAL_SLICE;

    switch (pic->type) {
    case PICTURE_TYPE_IDR:
        vslice->slice_type  = SLICE_TYPE_I;
        mslice->nal_ref_idc = 3;
        break;
    case PICTURE_TYPE_I:
        vslice->slice_type  = SLICE_TYPE_I;
        mslice->nal_ref_idc = 2;
        break;
    case PICTURE_TYPE_P:
        vslice->slice_type  = SLICE_TYPE_P;
        mslice->nal_ref_idc = 1;
        break;
    case PICTURE_TYPE_B:
        vslice->slice_type  = SLICE_TYPE_B;
        mslice->nal_ref_idc = 0;
        break;
    default:
        av_assert0(0 && "invalid picture type");
    }

    // Only one slice per frame.
    vslice->macroblock_address = 0;
    vslice->num_macroblocks = priv->mb_width * priv->mb_height;

    vslice->macroblock_info = VA_INVALID_ID;

    vslice->pic_parameter_set_id = vpic->pic_parameter_set_id;
    vslice->idr_pic_id = priv->idr_pic_count++;

    vslice->pic_order_cnt_lsb = pic->display_order &
        ((1 << (4 + vseq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4)) - 1);

    for (i = 0; i < FF_ARRAY_ELEMS(vslice->RefPicList0); i++) {
        vslice->RefPicList0[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList0[i].flags      = VA_PICTURE_H264_INVALID;
        vslice->RefPicList1[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList1[i].flags      = VA_PICTURE_H264_INVALID;
    }

    av_assert0(pic->nb_refs <= 2);
    if (pic->nb_refs >= 1) {
        // Backward reference for P- or B-frame.
        av_assert0(pic->type == PICTURE_TYPE_P ||
                   pic->type == PICTURE_TYPE_B);

        vslice->num_ref_idx_l0_active_minus1 = 0;
        vslice->RefPicList0[0] = vpic->ReferenceFrames[0];
    }
    if (pic->nb_refs >= 2) {
        // Forward reference for B-frame.
        av_assert0(pic->type == PICTURE_TYPE_B);

        vslice->num_ref_idx_l1_active_minus1 = 0;
        vslice->RefPicList1[0] = vpic->ReferenceFrames[1];
    }

    if (pic->type == PICTURE_TYPE_B)
        vslice->slice_qp_delta = priv->fixed_qp_b   - vpic->pic_init_qp;
    else if (pic->type == PICTURE_TYPE_P)
        vslice->slice_qp_delta = priv->fixed_qp_p   - vpic->pic_init_qp;
    else
        vslice->slice_qp_delta = priv->fixed_qp_idr - vpic->pic_init_qp;

    vslice->direct_spatial_mv_pred_flag = 1;

    return 0;
}

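// Configure CBR rate control: the driver receives the target bitrate and HRD
// parameters as global misc parameter buffers. If the user sets no explicit
// values, the HRD buffer defaults to one second of the target bitrate and
// starts three quarters full.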
static av_cold int vaapi_encode_h264_init_constant_bitrate(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int hrd_buffer_size;
    int hrd_initial_buffer_fullness;

    if (avctx->rc_buffer_size)
        hrd_buffer_size = avctx->rc_buffer_size;
    else
        hrd_buffer_size = avctx->bit_rate;
    if (avctx->rc_initial_buffer_occupancy)
        hrd_initial_buffer_fullness = avctx->rc_initial_buffer_occupancy;
    else
        hrd_initial_buffer_fullness = hrd_buffer_size * 3 / 4;

    priv->rc_params.misc.type = VAEncMiscParameterTypeRateControl;
    priv->rc_params.rc = (VAEncMiscParameterRateControl) {
        .bits_per_second   = avctx->bit_rate,
        .target_percentage = 66,
        .window_size       = 1000,
        .initial_qp        = (avctx->qmax >= 0 ? avctx->qmax : 40),
        .min_qp            = (avctx->qmin >= 0 ? avctx->qmin : 18),
        .basic_unit_size   = 0,
    };
    ctx->global_params[ctx->nb_global_params] =
        &priv->rc_params.misc;
    ctx->global_params_size[ctx->nb_global_params++] =
        sizeof(priv->rc_params);

    priv->hrd_params.misc.type = VAEncMiscParameterTypeHRD;
    priv->hrd_params.hrd = (VAEncMiscParameterHRD) {
        .initial_buffer_fullness = hrd_initial_buffer_fullness,
        .buffer_size             = hrd_buffer_size,
    };
    ctx->global_params[ctx->nb_global_params] =
        &priv->hrd_params.misc;
    ctx->global_params_size[ctx->nb_global_params++] =
        sizeof(priv->hrd_params);

    // These still need to be set for pic_init_qp/slice_qp_delta.
    priv->fixed_qp_idr = 26;
    priv->fixed_qp_p   = 26;
    priv->fixed_qp_b   = 26;

    av_log(avctx, AV_LOG_DEBUG, "Using constant-bitrate = %d bps.\n",
           avctx->bit_rate);
    return 0;
}

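// Configure constant-QP mode: P-frames use the qp option directly, while
// IDR- and B-frame QPs are derived from it via i_qfactor/i_qoffset and
// b_qfactor/b_qoffset (with the defaults below, the IDR QP equals the P QP
// and the B QP is slightly higher). The values are applied through
// pic_init_qp and a per-slice slice_qp_delta.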
static av_cold int vaapi_encode_h264_init_fixed_qp(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options  *opt = ctx->codec_options;

    priv->fixed_qp_p = opt->qp;
    if (avctx->i_quant_factor > 0.0)
        priv->fixed_qp_idr = (int)((priv->fixed_qp_p * avctx->i_quant_factor +
                                    avctx->i_quant_offset) + 0.5);
    else
        priv->fixed_qp_idr = priv->fixed_qp_p;
    if (avctx->b_quant_factor > 0.0)
        priv->fixed_qp_b = (int)((priv->fixed_qp_p * avctx->b_quant_factor +
                                  avctx->b_quant_offset) + 0.5);
    else
        priv->fixed_qp_b = priv->fixed_qp_p;

    av_log(avctx, AV_LOG_DEBUG, "Using fixed QP = "
           "%d / %d / %d for IDR- / P- / B-frames.\n",
           priv->fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);
    return 0;
}
static av_cold int vaapi_encode_h264_init_internal(AVCodecContext *avctx)
{
    static const VAConfigAttrib default_config_attributes[] = {
        { .type  = VAConfigAttribRTFormat,
          .value = VA_RT_FORMAT_YUV420 },
        { .type  = VAConfigAttribEncPackedHeaders,
          .value = (VA_ENC_PACKED_HEADER_SEQUENCE |
                    VA_ENC_PACKED_HEADER_SLICE) },
    };

    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options  *opt = ctx->codec_options;
    int i, err;

    switch (avctx->profile) {
    case FF_PROFILE_H264_CONSTRAINED_BASELINE:
        ctx->va_profile = VAProfileH264ConstrainedBaseline;
        break;
    case FF_PROFILE_H264_BASELINE:
        ctx->va_profile = VAProfileH264Baseline;
        break;
    case FF_PROFILE_H264_MAIN:
        ctx->va_profile = VAProfileH264Main;
        break;
    case FF_PROFILE_H264_EXTENDED:
        av_log(avctx, AV_LOG_ERROR, "H.264 extended profile "
               "is not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_UNKNOWN:
    case FF_PROFILE_H264_HIGH:
        ctx->va_profile = VAProfileH264High;
        break;
    case FF_PROFILE_H264_HIGH_10:
    case FF_PROFILE_H264_HIGH_10_INTRA:
        av_log(avctx, AV_LOG_ERROR, "H.264 10-bit profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_H264_HIGH_422:
    case FF_PROFILE_H264_HIGH_422_INTRA:
    case FF_PROFILE_H264_HIGH_444:
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
    case FF_PROFILE_H264_HIGH_444_INTRA:
    case FF_PROFILE_H264_CAVLC_444:
        av_log(avctx, AV_LOG_ERROR, "H.264 non-4:2:0 profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown H.264 profile %d.\n",
               avctx->profile);
        return AVERROR(EINVAL);
    }

    if (opt->low_power) {
#if VA_CHECK_VERSION(0, 39, 1)
        ctx->va_entrypoint = VAEntrypointEncSliceLP;
#else
        av_log(avctx, AV_LOG_ERROR, "Low-power encoding is not "
               "supported with this VAAPI version.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        ctx->va_entrypoint = VAEntrypointEncSlice;
    }

    ctx->input_width    = avctx->width;
    ctx->input_height   = avctx->height;
    ctx->aligned_width  = FFALIGN(ctx->input_width,  16);
    ctx->aligned_height = FFALIGN(ctx->input_height, 16);
    priv->mb_width      = ctx->aligned_width  / 16;
    priv->mb_height     = ctx->aligned_height / 16;

    for (i = 0; i < FF_ARRAY_ELEMS(default_config_attributes); i++) {
        ctx->config_attributes[ctx->nb_config_attributes++] =
            default_config_attributes[i];
    }

    if (avctx->bit_rate > 0) {
        ctx->va_rc_mode = VA_RC_CBR;
        err = vaapi_encode_h264_init_constant_bitrate(avctx);
    } else {
        ctx->va_rc_mode = VA_RC_CQP;
        err = vaapi_encode_h264_init_fixed_qp(avctx);
    }
    if (err < 0)
        return err;

    ctx->config_attributes[ctx->nb_config_attributes++] = (VAConfigAttrib) {
        .type  = VAConfigAttribRateControl,
        .value = ctx->va_rc_mode,
    };

    if (opt->quality > 0) {
#if VA_CHECK_VERSION(0, 36, 0)
        priv->quality_params.misc.type =
            VAEncMiscParameterTypeQualityLevel;
        priv->quality_params.quality.quality_level = opt->quality;

        ctx->global_params[ctx->nb_global_params] =
            &priv->quality_params.misc;
        ctx->global_params_size[ctx->nb_global_params++] =
            sizeof(priv->quality_params);
#else
        av_log(avctx, AV_LOG_WARNING, "The encode quality option is not "
               "supported with this VAAPI version.\n");
#endif
    }

    ctx->nb_recon_frames = 20;

    return 0;
}
static VAAPIEncodeType vaapi_encode_type_h264 = {
    .priv_data_size        = sizeof(VAAPIEncodeH264Context),

    .init                  = &vaapi_encode_h264_init_internal,

    .sequence_params_size  = sizeof(VAEncSequenceParameterBufferH264),
    .init_sequence_params  = &vaapi_encode_h264_init_sequence_params,

    .picture_params_size   = sizeof(VAEncPictureParameterBufferH264),
    .init_picture_params   = &vaapi_encode_h264_init_picture_params,

    .slice_params_size     = sizeof(VAEncSliceParameterBufferH264),
    .init_slice_params     = &vaapi_encode_h264_init_slice_params,

    .sequence_header_type  = VAEncPackedHeaderSequence,
    .write_sequence_header = &vaapi_encode_h264_write_sequence_header,

    .slice_header_type     = VAEncPackedHeaderH264_Slice,
    .write_slice_header    = &vaapi_encode_h264_write_slice_header,
};
static av_cold int vaapi_encode_h264_init(AVCodecContext *avctx)
{
    return ff_vaapi_encode_init(avctx, &vaapi_encode_type_h264);
}

#define OFFSET(x) (offsetof(VAAPIEncodeContext, codec_options_data) + \
                   offsetof(VAAPIEncodeH264Options, x))
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
static const AVOption vaapi_encode_h264_options[] = {
    { "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
      OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 20 }, 0, 52, FLAGS },
    { "quality", "Set encode quality (trades off against speed, higher is faster)",
      OFFSET(quality), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 8, FLAGS },
    { "low_power", "Use low-power encoding mode (experimental: only supported "
      "on some platforms, does not support all features)",
      OFFSET(low_power), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL },
};
static const AVCodecDefault vaapi_encode_h264_defaults[] = {
    { "profile",   "100" },
    { "level",     "51"  },
    { "b",         "0"   },
    { "bf",        "2"   },
    { "g",         "120" },
    { "i_qfactor", "1.0" },
    { "i_qoffset", "0.0" },
    { "b_qfactor", "1.2" },
    { "b_qoffset", "0.0" },
    { NULL },
};
static const AVClass vaapi_encode_h264_class = {
    .class_name = "h264_vaapi",
    .item_name  = av_default_item_name,
    .option     = vaapi_encode_h264_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_h264_vaapi_encoder = {
    .name           = "h264_vaapi",
    .long_name      = NULL_IF_CONFIG_SMALL("H.264/AVC (VAAPI)"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_H264,
    .priv_data_size = (sizeof(VAAPIEncodeContext) +
                       sizeof(VAAPIEncodeH264Options)),
    .init           = &vaapi_encode_h264_init,
    .encode2        = &ff_vaapi_encode2,
    .close          = &ff_vaapi_encode_close,
    .priv_class     = &vaapi_encode_h264_class,
    .capabilities   = AV_CODEC_CAP_DELAY,
    .defaults       = vaapi_encode_h264_defaults,
    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE,
    },
};