/*
 * MPEG-2 HW decode acceleration through VA API
 *
 * Copyright (C) 2008-2009 Splitted-Desktop Systems
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "hwaccel.h"
#include "mpegutils.h"
#include "mpegvideo.h"
#include "internal.h"
#include "vaapi_decode.h"

/** Reconstruct bitstream f_code */
static inline int mpeg2_get_f_code(const MpegEncContext *s)
{
    return (s->mpeg_f_code[0][0] << 12) | (s->mpeg_f_code[0][1] << 8) |
           (s->mpeg_f_code[1][0] <<  4) |  s->mpeg_f_code[1][1];
}

/** Determine frame start: first field for field picture or frame picture */
static inline int mpeg2_get_is_frame_start(const MpegEncContext *s)
{
    return s->first_field || s->picture_structure == PICT_FRAME;
}

static int vaapi_mpeg2_start_frame(AVCodecContext *avctx,
                                   av_unused const uint8_t *buffer,
                                   av_unused uint32_t size)
{
    const MpegEncContext *s = avctx->priv_data;
    VAAPIDecodePicture *pic = s->current_picture_ptr->hwaccel_picture_private;
    VAPictureParameterBufferMPEG2 pic_param;
    VAIQMatrixBufferMPEG2 iq_matrix;
    int i, err;

    pic->output_surface = ff_vaapi_get_surface_id(s->current_picture_ptr->f);
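
    /* Fill the picture parameter buffer from the parsed picture header
     * and picture coding extension. */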
    pic_param = (VAPictureParameterBufferMPEG2) {
        .horizontal_size            = s->width,
        .vertical_size              = s->height,
        .forward_reference_picture  = VA_INVALID_ID,
        .backward_reference_picture = VA_INVALID_ID,
        .picture_coding_type        = s->pict_type,
        .f_code                     = mpeg2_get_f_code(s),
        .picture_coding_extension.bits = {
            .intra_dc_precision         = s->intra_dc_precision,
            .picture_structure          = s->picture_structure,
            .top_field_first            = s->top_field_first,
            .frame_pred_frame_dct       = s->frame_pred_frame_dct,
            .concealment_motion_vectors = s->concealment_motion_vectors,
            .q_scale_type               = s->q_scale_type,
            .intra_vlc_format           = s->intra_vlc_format,
            .alternate_scan             = s->alternate_scan,
            .repeat_first_field         = s->repeat_first_field,
            .progressive_frame          = s->progressive_frame,
            .is_first_field             = mpeg2_get_is_frame_start(s),
        },
    };
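
    /* P-pictures need a forward reference surface; B-pictures need both
     * a forward and a backward reference surface. */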
    switch (s->pict_type) {
    case AV_PICTURE_TYPE_B:
        pic_param.backward_reference_picture = ff_vaapi_get_surface_id(s->next_picture.f);
        // fall-through
    case AV_PICTURE_TYPE_P:
        pic_param.forward_reference_picture = ff_vaapi_get_surface_id(s->last_picture.f);
        break;
    }

    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAPictureParameterBufferType,
                                            &pic_param, sizeof(pic_param));
    if (err < 0)
        goto fail;
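
    /* The software decoder stores the quantiser matrices with its IDCT
     * permutation applied; undo it here so the matrices are passed to
     * VA-API in the order they appear in the bitstream. */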
    iq_matrix.load_intra_quantiser_matrix            = 1;
    iq_matrix.load_non_intra_quantiser_matrix        = 1;
    iq_matrix.load_chroma_intra_quantiser_matrix     = 1;
    iq_matrix.load_chroma_non_intra_quantiser_matrix = 1;

    for (i = 0; i < 64; i++) {
        int n = s->idsp.idct_permutation[ff_zigzag_direct[i]];
        iq_matrix.intra_quantiser_matrix[i]            = s->intra_matrix[n];
        iq_matrix.non_intra_quantiser_matrix[i]        = s->inter_matrix[n];
        iq_matrix.chroma_intra_quantiser_matrix[i]     = s->chroma_intra_matrix[n];
        iq_matrix.chroma_non_intra_quantiser_matrix[i] = s->chroma_inter_matrix[n];
    }

    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                            VAIQMatrixBufferType,
                                            &iq_matrix, sizeof(iq_matrix));
    if (err < 0)
        goto fail;

    return 0;

fail:
    ff_vaapi_decode_cancel(avctx, pic);
    return err;
}

static int vaapi_mpeg2_end_frame(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;
    VAAPIDecodePicture *pic = s->current_picture_ptr->hwaccel_picture_private;
    int ret;

    ret = ff_vaapi_decode_issue(avctx, pic);
    if (ret < 0)
        goto fail;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);

fail:
    return ret;
}

static int vaapi_mpeg2_decode_slice(AVCodecContext *avctx,
                                    const uint8_t *buffer,
                                    uint32_t size)
{
    const MpegEncContext *s = avctx->priv_data;
    VAAPIDecodePicture *pic = s->current_picture_ptr->hwaccel_picture_private;
    VASliceParameterBufferMPEG2 slice_param;
    GetBitContext gb;
    uint32_t quantiser_scale_code, intra_slice_flag, macroblock_offset;
    int err;

    /* Determine macroblock_offset */
    init_get_bits(&gb, buffer, 8 * size);
    if (get_bits_long(&gb, 32) >> 8 != 1) /* start code */
        return AVERROR_INVALIDDATA;
    quantiser_scale_code = get_bits(&gb, 5);
    intra_slice_flag = get_bits1(&gb);
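    /* Skip intra_slice, reserved_bits and any extra_information_slice
     * bytes so that macroblock_offset points at the first macroblock. */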
    if (intra_slice_flag) {
        skip_bits(&gb, 8);
        while (get_bits1(&gb) != 0)
            skip_bits(&gb, 8);
    }
    macroblock_offset = get_bits_count(&gb);

    slice_param = (VASliceParameterBufferMPEG2) {
        .slice_data_size           = size,
        .slice_data_offset         = 0,
        .slice_data_flag           = VA_SLICE_DATA_FLAG_ALL,
        .macroblock_offset         = macroblock_offset,
        .slice_horizontal_position = s->mb_x,
        .slice_vertical_position   = s->mb_y >> (s->picture_structure != PICT_FRAME),
        .quantiser_scale_code      = quantiser_scale_code,
        .intra_slice_flag          = intra_slice_flag,
    };

    err = ff_vaapi_decode_make_slice_buffer(avctx, pic,
                                            &slice_param, sizeof(slice_param),
                                            buffer, size);
    if (err < 0) {
        ff_vaapi_decode_cancel(avctx, pic);
        return err;
    }

    return 0;
}

AVHWAccel ff_mpeg2_vaapi_hwaccel = {
    .name                 = "mpeg2_vaapi",
    .type                 = AVMEDIA_TYPE_VIDEO,
    .id                   = AV_CODEC_ID_MPEG2VIDEO,
    .pix_fmt              = AV_PIX_FMT_VAAPI,
    .start_frame          = &vaapi_mpeg2_start_frame,
    .end_frame            = &vaapi_mpeg2_end_frame,
    .decode_slice         = &vaapi_mpeg2_decode_slice,
    .frame_priv_data_size = sizeof(VAAPIDecodePicture),
    .init                 = &ff_vaapi_decode_init,
    .uninit               = &ff_vaapi_decode_uninit,
    .priv_data_size       = sizeof(VAAPIDecodeContext),
    .caps_internal        = HWACCEL_CAP_ASYNC_SAFE,
};