/*
 * HEVC HW decode acceleration through VA API
 *
 * Copyright (C) 2015 Timo Rothenpieler <timo@rothenpieler.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "vaapi_internal.h"
#include "hevc.h"
#include "mpegutils.h"

/**
 * @file
 * This file implements the glue code between FFmpeg's and VA API's
 * structures for HEVC decoding.
 */
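
/* Per-frame hwaccel private data: the picture parameter buffer and the most
 * recently submitted slice parameter buffer, kept so that the last slice of
 * the picture can be flagged when the frame is ended. */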
typedef struct vaapi_hevc_frame_data {
    VAPictureParameterBufferHEVC *pic_param;
    VASliceParameterBufferHEVC *last_slice_param;
} vaapi_hevc_frame_data;

/**
 * Initialize an empty VA API picture.
 *
 * VA API requires a fixed-size reference picture array.
 */
static void init_vaapi_pic(VAPictureHEVC *va_pic)
{
    va_pic->picture_id = VA_INVALID_ID;
    va_pic->flags = VA_PICTURE_HEVC_INVALID;
    va_pic->pic_order_cnt = 0;
}
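
/** Fill a VA API picture from an FFmpeg HEVC frame, tagging it with the given RPS type flags. */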
static void fill_vaapi_pic(VAPictureHEVC *va_pic, const HEVCFrame *pic, int rps_type)
{
    va_pic->picture_id = ff_vaapi_get_surface_id(pic->frame);
    va_pic->pic_order_cnt = pic->poc;
    va_pic->flags = rps_type;

    if (pic->flags & HEVC_FRAME_FLAG_LONG_REF)
        va_pic->flags |= VA_PICTURE_HEVC_LONG_TERM_REFERENCE;

    if (pic->frame->interlaced_frame) {
        va_pic->flags |= VA_PICTURE_HEVC_FIELD_PIC;

        if (!pic->frame->top_field_first) {
            va_pic->flags |= VA_PICTURE_HEVC_BOTTOM_FIELD;
        }
    }
}
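
/**
 * Determine which reference picture set the given frame belongs to
 * (short-term before/after or long-term current), expressed as a
 * VA_PICTURE_HEVC_RPS_* flag, or 0 if it is in none of them.
 */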
static int find_frame_rps_type(const HEVCContext *h, const HEVCFrame *pic)
{
    VASurfaceID pic_surf = ff_vaapi_get_surface_id(pic->frame);
    int i;

    for (i = 0; i < h->rps[ST_CURR_BEF].nb_refs; ++i) {
        if (pic_surf == ff_vaapi_get_surface_id(h->rps[ST_CURR_BEF].ref[i]->frame))
            return VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE;
    }

    for (i = 0; i < h->rps[ST_CURR_AFT].nb_refs; ++i) {
        if (pic_surf == ff_vaapi_get_surface_id(h->rps[ST_CURR_AFT].ref[i]->frame))
            return VA_PICTURE_HEVC_RPS_ST_CURR_AFTER;
    }

    for (i = 0; i < h->rps[LT_CURR].nb_refs; ++i) {
        if (pic_surf == ff_vaapi_get_surface_id(h->rps[LT_CURR].ref[i]->frame))
            return VA_PICTURE_HEVC_RPS_LT_CURR;
    }

    return 0;
}
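
/** Populate the ReferenceFrames array of the picture parameter buffer from the decoder's DPB. */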
static void fill_vaapi_ReferenceFrames(const HEVCContext *h, VAPictureParameterBufferHEVC *pp)
{
    const HEVCFrame *current_picture = h->ref;
    int i, j, rps_type;

    for (i = 0, j = 0; i < FF_ARRAY_ELEMS(pp->ReferenceFrames); i++) {
        const HEVCFrame *frame = NULL;

        while (!frame && j < FF_ARRAY_ELEMS(h->DPB)) {
            if (&h->DPB[j] != current_picture && (h->DPB[j].flags & (HEVC_FRAME_FLAG_LONG_REF | HEVC_FRAME_FLAG_SHORT_REF)))
                frame = &h->DPB[j];
            j++;
        }

        init_vaapi_pic(&pp->ReferenceFrames[i]);

        if (frame) {
            rps_type = find_frame_rps_type(h, frame);
            fill_vaapi_pic(&pp->ReferenceFrames[i], frame, rps_type);
        }
    }
}
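
/**
 * Look up the index of a frame in the ReferenceFrames array of the current
 * picture parameter buffer; returns 0xff if the frame is not present.
 */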
static uint8_t get_ref_pic_index(const HEVCContext *h, const HEVCFrame *frame)
{
    vaapi_hevc_frame_data *frame_data = h->ref->hwaccel_picture_private;
    VAPictureParameterBufferHEVC *pp = frame_data->pic_param;
    uint8_t i;

    if (!frame)
        return 0xff;

    for (i = 0; i < FF_ARRAY_ELEMS(pp->ReferenceFrames); ++i) {
        VASurfaceID pid = pp->ReferenceFrames[i].picture_id;
        int poc = pp->ReferenceFrames[i].pic_order_cnt;
        if (pid != VA_INVALID_ID && pid == ff_vaapi_get_surface_id(frame->frame) && poc == frame->poc)
            return i;
    }

    return 0xff;
}
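
/** Fill VAPictureParameterBufferHEVC from the active SPS, PPS and current frame state. */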
static void fill_picture_parameters(const HEVCContext *h, VAPictureParameterBufferHEVC *pp)
{
    int i;

    pp->pic_fields.value = 0;
    pp->slice_parsing_fields.value = 0;

    fill_vaapi_pic(&pp->CurrPic, h->ref, 0);
    fill_vaapi_ReferenceFrames(h, pp);

    pp->pic_width_in_luma_samples = h->ps.sps->width;
    pp->pic_height_in_luma_samples = h->ps.sps->height;

    pp->log2_min_luma_coding_block_size_minus3 = h->ps.sps->log2_min_cb_size - 3;

    pp->pic_fields.bits.chroma_format_idc = h->ps.sps->chroma_format_idc;

    pp->sps_max_dec_pic_buffering_minus1 = h->ps.sps->temporal_layer[h->ps.sps->max_sub_layers - 1].max_dec_pic_buffering - 1;

    pp->log2_diff_max_min_luma_coding_block_size = h->ps.sps->log2_diff_max_min_coding_block_size;
    pp->log2_min_transform_block_size_minus2 = h->ps.sps->log2_min_tb_size - 2;
    pp->log2_diff_max_min_transform_block_size = h->ps.sps->log2_max_trafo_size - h->ps.sps->log2_min_tb_size;
    pp->max_transform_hierarchy_depth_inter = h->ps.sps->max_transform_hierarchy_depth_inter;
    pp->max_transform_hierarchy_depth_intra = h->ps.sps->max_transform_hierarchy_depth_intra;

    pp->num_short_term_ref_pic_sets = h->ps.sps->nb_st_rps;
    pp->num_long_term_ref_pic_sps = h->ps.sps->num_long_term_ref_pics_sps;

    pp->num_ref_idx_l0_default_active_minus1 = h->ps.pps->num_ref_idx_l0_default_active - 1;
    pp->num_ref_idx_l1_default_active_minus1 = h->ps.pps->num_ref_idx_l1_default_active - 1;
    pp->init_qp_minus26 = h->ps.pps->pic_init_qp_minus26;

    pp->pps_cb_qp_offset = h->ps.pps->cb_qp_offset;
    pp->pps_cr_qp_offset = h->ps.pps->cr_qp_offset;

    pp->pic_fields.bits.tiles_enabled_flag = h->ps.pps->tiles_enabled_flag;
    pp->pic_fields.bits.separate_colour_plane_flag = h->ps.sps->separate_colour_plane_flag;
    pp->pic_fields.bits.pcm_enabled_flag = h->ps.sps->pcm_enabled_flag;
    pp->pic_fields.bits.scaling_list_enabled_flag = h->ps.sps->scaling_list_enable_flag;
    pp->pic_fields.bits.transform_skip_enabled_flag = h->ps.pps->transform_skip_enabled_flag;
    pp->pic_fields.bits.amp_enabled_flag = h->ps.sps->amp_enabled_flag;
    pp->pic_fields.bits.strong_intra_smoothing_enabled_flag = h->ps.sps->sps_strong_intra_smoothing_enable_flag;
    pp->pic_fields.bits.sign_data_hiding_enabled_flag = h->ps.pps->sign_data_hiding_flag;
    pp->pic_fields.bits.constrained_intra_pred_flag = h->ps.pps->constrained_intra_pred_flag;
    pp->pic_fields.bits.cu_qp_delta_enabled_flag = h->ps.pps->cu_qp_delta_enabled_flag;
    pp->pic_fields.bits.weighted_pred_flag = h->ps.pps->weighted_pred_flag;
    pp->pic_fields.bits.weighted_bipred_flag = h->ps.pps->weighted_bipred_flag;
    pp->pic_fields.bits.transquant_bypass_enabled_flag = h->ps.pps->transquant_bypass_enable_flag;
    pp->pic_fields.bits.entropy_coding_sync_enabled_flag = h->ps.pps->entropy_coding_sync_enabled_flag;
    pp->pic_fields.bits.pps_loop_filter_across_slices_enabled_flag = h->ps.pps->seq_loop_filter_across_slices_enabled_flag;
    pp->pic_fields.bits.loop_filter_across_tiles_enabled_flag = h->ps.pps->loop_filter_across_tiles_enabled_flag;
    pp->pic_fields.bits.pcm_loop_filter_disabled_flag = h->ps.sps->pcm.loop_filter_disable_flag;

    pp->pcm_sample_bit_depth_luma_minus1 = h->ps.sps->pcm.bit_depth - 1;
    pp->pcm_sample_bit_depth_chroma_minus1 = h->ps.sps->pcm.bit_depth_chroma - 1;
    pp->log2_min_pcm_luma_coding_block_size_minus3 = h->ps.sps->pcm.log2_min_pcm_cb_size - 3;
    pp->log2_diff_max_min_pcm_luma_coding_block_size = h->ps.sps->pcm.log2_max_pcm_cb_size - h->ps.sps->pcm.log2_min_pcm_cb_size;

    memset(pp->column_width_minus1, 0, sizeof(pp->column_width_minus1));
    memset(pp->row_height_minus1, 0, sizeof(pp->row_height_minus1));

    if (h->ps.pps->tiles_enabled_flag) {
        pp->num_tile_columns_minus1 = h->ps.pps->num_tile_columns - 1;
        pp->num_tile_rows_minus1 = h->ps.pps->num_tile_rows - 1;

        for (i = 0; i < h->ps.pps->num_tile_columns; i++)
            pp->column_width_minus1[i] = h->ps.pps->column_width[i] - 1;

        for (i = 0; i < h->ps.pps->num_tile_rows; i++)
            pp->row_height_minus1[i] = h->ps.pps->row_height[i] - 1;
    }

    pp->diff_cu_qp_delta_depth = h->ps.pps->diff_cu_qp_delta_depth;
    pp->pps_beta_offset_div2 = h->ps.pps->beta_offset / 2;
    pp->pps_tc_offset_div2 = h->ps.pps->tc_offset / 2;
    pp->log2_parallel_merge_level_minus2 = h->ps.pps->log2_parallel_merge_level - 2;

    /* Different chroma/luma bit depths are currently not supported by ffmpeg. */
    pp->bit_depth_luma_minus8 = h->ps.sps->bit_depth - 8;
    pp->bit_depth_chroma_minus8 = h->ps.sps->bit_depth - 8;

    pp->slice_parsing_fields.bits.lists_modification_present_flag = h->ps.pps->lists_modification_present_flag;
    pp->slice_parsing_fields.bits.long_term_ref_pics_present_flag = h->ps.sps->long_term_ref_pics_present_flag;
    pp->slice_parsing_fields.bits.sps_temporal_mvp_enabled_flag = h->ps.sps->sps_temporal_mvp_enabled_flag;
    pp->slice_parsing_fields.bits.cabac_init_present_flag = h->ps.pps->cabac_init_present_flag;
    pp->slice_parsing_fields.bits.output_flag_present_flag = h->ps.pps->output_flag_present_flag;
    pp->slice_parsing_fields.bits.dependent_slice_segments_enabled_flag = h->ps.pps->dependent_slice_segments_enabled_flag;
    pp->slice_parsing_fields.bits.pps_slice_chroma_qp_offsets_present_flag = h->ps.pps->pic_slice_level_chroma_qp_offsets_present_flag;
    pp->slice_parsing_fields.bits.sample_adaptive_offset_enabled_flag = h->ps.sps->sao_enabled;
    pp->slice_parsing_fields.bits.deblocking_filter_override_enabled_flag = h->ps.pps->deblocking_filter_override_enabled_flag;
    pp->slice_parsing_fields.bits.pps_disable_deblocking_filter_flag = h->ps.pps->disable_dbf;
    pp->slice_parsing_fields.bits.slice_segment_header_extension_present_flag = h->ps.pps->slice_header_extension_present_flag;

    pp->log2_max_pic_order_cnt_lsb_minus4 = h->ps.sps->log2_max_poc_lsb - 4;
    pp->num_extra_slice_header_bits = h->ps.pps->num_extra_slice_header_bits;

    if (h->nal_unit_type >= NAL_BLA_W_LP && h->nal_unit_type <= NAL_CRA_NUT) {
        pp->slice_parsing_fields.bits.RapPicFlag = 1;
    } else {
        pp->slice_parsing_fields.bits.RapPicFlag = 0;
    }

    if (IS_IDR(h)) {
        pp->slice_parsing_fields.bits.IdrPicFlag = 1;
    } else {
        pp->slice_parsing_fields.bits.IdrPicFlag = 0;
    }

    if (IS_IRAP(h)) {
        pp->slice_parsing_fields.bits.IntraPicFlag = 1;
    } else {
        pp->slice_parsing_fields.bits.IntraPicFlag = 0;
    }

    if (h->sh.short_term_ref_pic_set_sps_flag == 0 && h->sh.short_term_rps) {
        pp->st_rps_bits = h->sh.short_term_ref_pic_set_size;
    } else {
        pp->st_rps_bits = 0;
    }

    /* TODO */
    pp->pic_fields.bits.NoPicReorderingFlag = 0;
    pp->pic_fields.bits.NoBiPredFlag = 0;
}

/** Initialize and start decoding a frame with VA API. */
static int vaapi_hevc_start_frame(AVCodecContext *avctx,
                                  av_unused const uint8_t *buffer,
                                  av_unused uint32_t size)
{
    HEVCContext * const h = avctx->priv_data;
    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
    vaapi_hevc_frame_data *frame_data = h->ref->hwaccel_picture_private;
    VAPictureParameterBufferHEVC *pic_param;
    VAIQMatrixBufferHEVC *iq_matrix;
    ScalingList const * scaling_list;
    int i, j;

    ff_dlog(avctx, "vaapi_hevc_start_frame()\n");

    vactx->slice_param_size = sizeof(VASliceParameterBufferHEVC);

    /* Fill in VAPictureParameterBufferHEVC. */
    pic_param = ff_vaapi_alloc_pic_param(vactx, sizeof(VAPictureParameterBufferHEVC));
    if (!pic_param)
        return -1;
    fill_picture_parameters(h, pic_param);
    frame_data->pic_param = pic_param;

    /* Fill in VAIQMatrixBufferHEVC. */
    if (h->ps.pps->scaling_list_data_present_flag) {
        scaling_list = &h->ps.pps->scaling_list;
    } else if (h->ps.sps->scaling_list_enable_flag) {
        scaling_list = &h->ps.sps->scaling_list;
    } else {
        return 0;
    }

    iq_matrix = ff_vaapi_alloc_iq_matrix(vactx, sizeof(VAIQMatrixBufferHEVC));
    if (!iq_matrix)
        return -1;
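
    /*
     * Copy the scaling lists. FFmpeg stores them as sl[size_id][matrix_id];
     * for the 32x32 lists only matrix IDs 0 and 3 exist, hence the i * 3
     * index mapping below.
     */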
    for (i = 0; i < 6; ++i) {
        for (j = 0; j < 16; ++j) {
            iq_matrix->ScalingList4x4[i][j] = scaling_list->sl[0][i][j];
        }
        for (j = 0; j < 64; ++j) {
            iq_matrix->ScalingList8x8[i][j] = scaling_list->sl[1][i][j];
            iq_matrix->ScalingList16x16[i][j] = scaling_list->sl[2][i][j];
            if (i < 2) {
                iq_matrix->ScalingList32x32[i][j] = scaling_list->sl[3][i * 3][j];
            }
        }
        iq_matrix->ScalingListDC16x16[i] = scaling_list->sl_dc[0][i];
        if (i < 2) {
            iq_matrix->ScalingListDC32x32[i] = scaling_list->sl_dc[1][i * 3];
        }
    }

    return 0;
}

/** End hardware decoding of a frame. */
static int vaapi_hevc_end_frame(AVCodecContext *avctx)
{
    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
    HEVCContext * const h = avctx->priv_data;
    vaapi_hevc_frame_data *frame_data = h->ref->hwaccel_picture_private;
    int ret;

    ff_dlog(avctx, "vaapi_hevc_end_frame()\n");

    frame_data->last_slice_param->LongSliceFlags.fields.LastSliceOfPic = 1;

    ret = ff_vaapi_commit_slices(vactx);
    if (ret < 0)
        goto finish;

    ret = ff_vaapi_render_picture(vactx, ff_vaapi_get_surface_id(h->ref->frame));
    if (ret < 0)
        goto finish;

finish:
    ff_vaapi_common_end_frame(avctx);
    return ret;
}
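
/** Fill the weighted-prediction tables of a slice parameter buffer from the slice header. */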
static int fill_pred_weight_table(HEVCContext * const h,
                                  VASliceParameterBufferHEVC *slice_param,
                                  SliceHeader * const sh)
{
    int i;

    memset(slice_param->delta_luma_weight_l0, 0, sizeof(slice_param->delta_luma_weight_l0));
    memset(slice_param->delta_luma_weight_l1, 0, sizeof(slice_param->delta_luma_weight_l1));
    memset(slice_param->luma_offset_l0, 0, sizeof(slice_param->luma_offset_l0));
    memset(slice_param->luma_offset_l1, 0, sizeof(slice_param->luma_offset_l1));
    memset(slice_param->delta_chroma_weight_l0, 0, sizeof(slice_param->delta_chroma_weight_l0));
    memset(slice_param->delta_chroma_weight_l1, 0, sizeof(slice_param->delta_chroma_weight_l1));
    memset(slice_param->ChromaOffsetL0, 0, sizeof(slice_param->ChromaOffsetL0));
    memset(slice_param->ChromaOffsetL1, 0, sizeof(slice_param->ChromaOffsetL1));

    slice_param->delta_chroma_log2_weight_denom = 0;
    slice_param->luma_log2_weight_denom = 0;

    if (sh->slice_type == I_SLICE ||
        (sh->slice_type == P_SLICE && !h->ps.pps->weighted_pred_flag) ||
        (sh->slice_type == B_SLICE && !h->ps.pps->weighted_bipred_flag)) {
        return 0;
    }

    slice_param->luma_log2_weight_denom = sh->luma_log2_weight_denom;

    if (h->ps.sps->chroma_format_idc) {
        slice_param->delta_chroma_log2_weight_denom = sh->chroma_log2_weight_denom - sh->luma_log2_weight_denom;
    }

    for (i = 0; i < 15 && i < sh->nb_refs[L0]; ++i) {
        slice_param->delta_luma_weight_l0[i] = sh->luma_weight_l0[i] - (1 << sh->luma_log2_weight_denom);
        slice_param->luma_offset_l0[i] = sh->luma_offset_l0[i];
        slice_param->delta_chroma_weight_l0[i][0] = sh->chroma_weight_l0[i][0] - (1 << sh->chroma_log2_weight_denom);
        slice_param->delta_chroma_weight_l0[i][1] = sh->chroma_weight_l0[i][1] - (1 << sh->chroma_log2_weight_denom);
        slice_param->ChromaOffsetL0[i][0] = sh->chroma_offset_l0[i][0];
        slice_param->ChromaOffsetL0[i][1] = sh->chroma_offset_l0[i][1];
    }

    if (sh->slice_type == B_SLICE) {
        for (i = 0; i < 15 && i < sh->nb_refs[L1]; ++i) {
            slice_param->delta_luma_weight_l1[i] = sh->luma_weight_l1[i] - (1 << sh->luma_log2_weight_denom);
            slice_param->luma_offset_l1[i] = sh->luma_offset_l1[i];
            slice_param->delta_chroma_weight_l1[i][0] = sh->chroma_weight_l1[i][0] - (1 << sh->chroma_log2_weight_denom);
            slice_param->delta_chroma_weight_l1[i][1] = sh->chroma_weight_l1[i][1] - (1 << sh->chroma_log2_weight_denom);
            slice_param->ChromaOffsetL1[i][0] = sh->chroma_offset_l1[i][0];
            slice_param->ChromaOffsetL1[i][1] = sh->chroma_offset_l1[i][1];
        }
    }

    return 0;
}

/** Decode the given HEVC slice with VA API. */
static int vaapi_hevc_decode_slice(AVCodecContext *avctx,
                                   const uint8_t *buffer,
                                   uint32_t size)
{
    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
    HEVCContext * const h = avctx->priv_data;
    vaapi_hevc_frame_data *frame_data = h->ref->hwaccel_picture_private;
    SliceHeader * const sh = &h->sh;
    VASliceParameterBufferHEVC *slice_param;
    int i, list_idx;
    uint8_t nb_list = sh->slice_type == B_SLICE ? 2 : 1;

    if (sh->slice_type == I_SLICE)
        nb_list = 0;

    ff_dlog(avctx, "vaapi_hevc_decode_slice(): buffer %p, size %d\n", buffer, size);

    /* Fill in VASliceParameterBufferHEVC. */
    slice_param = (VASliceParameterBufferHEVC *)ff_vaapi_alloc_slice(vactx, buffer, size);
    if (!slice_param)
        return -1;
    frame_data->last_slice_param = slice_param;

    /* The base structure changed, so this has to be re-set in order to be valid on every byte order. */
    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;

    /* Add 1 to the bit count here to account for the byte_alignment bit, which is always at least one bit and not accounted for otherwise. */
    slice_param->slice_data_byte_offset = (get_bits_count(&h->HEVClc->gb) + 1 + 7) / 8;

    slice_param->slice_segment_address = sh->slice_segment_addr;

    slice_param->LongSliceFlags.value = 0;
    slice_param->LongSliceFlags.fields.dependent_slice_segment_flag = sh->dependent_slice_segment_flag;
    slice_param->LongSliceFlags.fields.slice_type = sh->slice_type;
    slice_param->LongSliceFlags.fields.color_plane_id = sh->colour_plane_id;
    slice_param->LongSliceFlags.fields.mvd_l1_zero_flag = sh->mvd_l1_zero_flag;
    slice_param->LongSliceFlags.fields.cabac_init_flag = sh->cabac_init_flag;
    slice_param->LongSliceFlags.fields.slice_temporal_mvp_enabled_flag = sh->slice_temporal_mvp_enabled_flag;
    slice_param->LongSliceFlags.fields.slice_deblocking_filter_disabled_flag = sh->disable_deblocking_filter_flag;
    slice_param->LongSliceFlags.fields.collocated_from_l0_flag = sh->collocated_list == L0 ? 1 : 0;
    slice_param->LongSliceFlags.fields.slice_loop_filter_across_slices_enabled_flag = sh->slice_loop_filter_across_slices_enabled_flag;
    slice_param->LongSliceFlags.fields.slice_sao_luma_flag = sh->slice_sample_adaptive_offset_flag[0];

    if (h->ps.sps->chroma_format_idc) {
        slice_param->LongSliceFlags.fields.slice_sao_chroma_flag = sh->slice_sample_adaptive_offset_flag[1];
    }

    if (sh->slice_temporal_mvp_enabled_flag) {
        slice_param->collocated_ref_idx = sh->collocated_ref_idx;
    } else {
        slice_param->collocated_ref_idx = 0xFF;
    }

    slice_param->slice_qp_delta = sh->slice_qp_delta;
    slice_param->slice_cb_qp_offset = sh->slice_cb_qp_offset;
    slice_param->slice_cr_qp_offset = sh->slice_cr_qp_offset;
    slice_param->slice_beta_offset_div2 = sh->beta_offset / 2;
    slice_param->slice_tc_offset_div2 = sh->tc_offset / 2;

    if (sh->slice_type == I_SLICE) {
        slice_param->five_minus_max_num_merge_cand = 0;
    } else {
        slice_param->five_minus_max_num_merge_cand = 5 - sh->max_num_merge_cand;
    }

    slice_param->num_ref_idx_l0_active_minus1 = sh->nb_refs[L0] ? sh->nb_refs[L0] - 1 : 0;
    slice_param->num_ref_idx_l1_active_minus1 = sh->nb_refs[L1] ? sh->nb_refs[L1] - 1 : 0;

    memset(slice_param->RefPicList, 0xFF, sizeof(slice_param->RefPicList));

    /* h->ref->refPicList is updated before each slice is decoded. */
    for (list_idx = 0; list_idx < nb_list; ++list_idx) {
        RefPicList *rpl = &h->ref->refPicList[list_idx];

        for (i = 0; i < rpl->nb_refs; ++i) {
            slice_param->RefPicList[list_idx][i] = get_ref_pic_index(h, rpl->ref[i]);
        }
    }

    return fill_pred_weight_table(h, slice_param, sh);
}
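
/* Hardware accelerator descriptor exposed to libavcodec as "hevc_vaapi". */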
AVHWAccel ff_hevc_vaapi_hwaccel = {
    .name                 = "hevc_vaapi",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_HEVC,
    .pix_fmt              = AV_PIX_FMT_VAAPI,
    .start_frame          = vaapi_hevc_start_frame,
    .end_frame            = vaapi_hevc_end_frame,
    .decode_slice         = vaapi_hevc_decode_slice,
    .init                 = ff_vaapi_context_init,
    .uninit               = ff_vaapi_context_fini,
    .priv_data_size       = sizeof(FFVAContext),
    .frame_priv_data_size = sizeof(vaapi_hevc_frame_data),
};