You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

434 lines
14KB

  1. /*
  2. * Video Decode and Presentation API for UNIX (VDPAU) is used for
  3. * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
  4. *
  5. * Copyright (c) 2008 NVIDIA
  6. *
  7. * This file is part of Libav.
  8. *
  9. * Libav is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * Libav is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with Libav; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. #include <limits.h>
  24. #include "avcodec.h"
  25. #include "decode.h"
  26. #include "internal.h"
  27. #include "h264dec.h"
  28. #include "vc1.h"
  29. #include "vdpau.h"
  30. #include "vdpau_internal.h"
  31. /**
  32. * @addtogroup VDPAU_Decoding
  33. *
  34. * @{
  35. */
  36. static int vdpau_error(VdpStatus status)
  37. {
  38. switch (status) {
  39. case VDP_STATUS_OK:
  40. return 0;
  41. case VDP_STATUS_NO_IMPLEMENTATION:
  42. return AVERROR(ENOSYS);
  43. case VDP_STATUS_DISPLAY_PREEMPTED:
  44. return AVERROR(EIO);
  45. case VDP_STATUS_INVALID_HANDLE:
  46. return AVERROR(EBADF);
  47. case VDP_STATUS_INVALID_POINTER:
  48. return AVERROR(EFAULT);
  49. case VDP_STATUS_RESOURCES:
  50. return AVERROR(ENOBUFS);
  51. case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
  52. return AVERROR(EXDEV);
  53. case VDP_STATUS_ERROR:
  54. return AVERROR(EIO);
  55. default:
  56. return AVERROR(EINVAL);
  57. }
  58. }
  59. int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
  60. VdpChromaType *type,
  61. uint32_t *width, uint32_t *height)
  62. {
  63. VdpChromaType t;
  64. uint32_t w = avctx->coded_width;
  65. uint32_t h = avctx->coded_height;
  66. /* See <vdpau/vdpau.h> for per-type alignment constraints. */
  67. switch (avctx->sw_pix_fmt) {
  68. case AV_PIX_FMT_YUV420P:
  69. case AV_PIX_FMT_YUVJ420P:
  70. t = VDP_CHROMA_TYPE_420;
  71. w = (w + 1) & ~1;
  72. h = (h + 3) & ~3;
  73. break;
  74. case AV_PIX_FMT_YUV422P:
  75. case AV_PIX_FMT_YUVJ422P:
  76. t = VDP_CHROMA_TYPE_422;
  77. w = (w + 1) & ~1;
  78. h = (h + 1) & ~1;
  79. break;
  80. case AV_PIX_FMT_YUV444P:
  81. case AV_PIX_FMT_YUVJ444P:
  82. t = VDP_CHROMA_TYPE_444;
  83. h = (h + 1) & ~1;
  84. break;
  85. default:
  86. return AVERROR(ENOSYS);
  87. }
  88. if (type)
  89. *type = t;
  90. if (width)
  91. *width = w;
  92. if (height)
  93. *height = h;
  94. return 0;
  95. }
  96. int ff_vdpau_common_frame_params(AVCodecContext *avctx,
  97. AVBufferRef *hw_frames_ctx)
  98. {
  99. AVHWFramesContext *hw_frames = (AVHWFramesContext*)hw_frames_ctx->data;
  100. VdpChromaType type;
  101. uint32_t width;
  102. uint32_t height;
  103. if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
  104. return AVERROR(EINVAL);
  105. hw_frames->format = AV_PIX_FMT_VDPAU;
  106. hw_frames->sw_format = avctx->sw_pix_fmt;
  107. hw_frames->width = width;
  108. hw_frames->height = height;
  109. return 0;
  110. }
/**
 * Common hwaccel init: verify the VDPAU device can decode this
 * profile/level at the coded size, then create a VdpDecoder.
 *
 * Two setup paths exist:
 *  - legacy: the user supplied a VDPAUHWContext via avctx->hwaccel_context
 *    (possibly with an already-created decoder, which is adopted as-is);
 *  - modern: a hw frames context is derived via ff_decode_get_hw_frames_ctx().
 *
 * @param profile VDPAU decoder profile to create.
 * @param level   codec level; 0 disables the level check, negative rejects.
 * @return 0 on success, a negative AVERROR code on failure.
 */
int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                         int level)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
    VdpDecoderQueryCapabilities *decoder_query_caps;
    VdpDecoderCreate *create;
    void *func;
    VdpStatus status;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;
    int ret;

    /* Sentinel values: ff_vdpau_common_uninit() treats UINT32_MAX/UINT32_MAX
     * as "no decoder was created here" and skips destruction. */
    vdctx->width            = UINT32_MAX;
    vdctx->height           = UINT32_MAX;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(ENOSYS);

    if (hwctx) {
        /* Legacy path: user-provided hwaccel_context. */
        hwctx->reset            = 0;

        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
            /* Adopt the user's decoder; device stays invalid so that
             * uninit/reinit know we do not own it. */
            vdctx->decoder = hwctx->context.decoder;
            vdctx->render  = hwctx->context.render;
            vdctx->device  = VDP_INVALID_HANDLE;
            return 0; /* Decoder created by user */
        }

        vdctx->device           = hwctx->device;
        vdctx->get_proc_address = hwctx->get_proc_address;

        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;

        /* Without the high-depth opt-in, only 4:2:0 surfaces are accepted. */
        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
            type != VDP_CHROMA_TYPE_420)
            return AVERROR(ENOSYS);
    } else {
        /* Modern path: derive device/get_proc_address from the hw frames
         * context created (or validated) by the generic decode code. */
        AVHWFramesContext *frames_ctx;
        AVVDPAUDeviceContext *dev_ctx;

        ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VDPAU);
        if (ret < 0)
            return ret;

        frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        dev_ctx = frames_ctx->device_ctx->hwctx;

        vdctx->device           = dev_ctx->device;
        vdctx->get_proc_address = dev_ctx->get_proc_address;

        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;
    }

    /* A negative level means the caller could not determine a supported
     * level for this stream. */
    if (level < 0)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        surface_query_caps = func;

    /* Check the device can allocate video surfaces of this chroma type
     * and size. */
    status = surface_query_caps(vdctx->device, type, &supported,
                                &max_width, &max_height);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (supported != VDP_TRUE ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        decoder_query_caps = func;

    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                &max_mb, &max_width, &max_height);
#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
    /* Older drivers may not expose Constrained Baseline; fall back to the
     * Main profile, which is a superset. */
    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        status = decoder_query_caps(vdctx->device, profile, &supported,
                                    &max_level, &max_mb,
                                    &max_width, &max_height);
    }
#endif
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);

    if (supported != VDP_TRUE || max_level < level ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        create = func;

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        vdctx->render = func;

    status = create(vdctx->device, profile, width, height, avctx->refs,
                    &vdctx->decoder);
    if (status == VDP_STATUS_OK) {
        /* Record the *coded* (unaligned) size; ff_vdpau_common_reinit()
         * compares against coded_width/coded_height to detect changes. */
        vdctx->width  = avctx->coded_width;
        vdctx->height = avctx->coded_height;
    }

    return vdpau_error(status);
}
  217. int ff_vdpau_common_uninit(AVCodecContext *avctx)
  218. {
  219. VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
  220. VdpDecoderDestroy *destroy;
  221. void *func;
  222. VdpStatus status;
  223. if (vdctx->device == VDP_INVALID_HANDLE)
  224. return 0; /* Decoder created and destroyed by user */
  225. if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
  226. return 0;
  227. status = vdctx->get_proc_address(vdctx->device,
  228. VDP_FUNC_ID_DECODER_DESTROY, &func);
  229. if (status != VDP_STATUS_OK)
  230. return vdpau_error(status);
  231. else
  232. destroy = func;
  233. status = destroy(vdctx->decoder);
  234. return vdpau_error(status);
  235. }
  236. static int ff_vdpau_common_reinit(AVCodecContext *avctx)
  237. {
  238. VDPAUHWContext *hwctx = avctx->hwaccel_context;
  239. VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
  240. if (vdctx->device == VDP_INVALID_HANDLE)
  241. return 0; /* Decoder created by user */
  242. if (avctx->coded_width == vdctx->width &&
  243. avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
  244. return 0;
  245. avctx->hwaccel->uninit(avctx);
  246. return avctx->hwaccel->init(avctx);
  247. }
  248. int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
  249. av_unused const uint8_t *buffer,
  250. av_unused uint32_t size)
  251. {
  252. pic_ctx->bitstream_buffers_allocated = 0;
  253. pic_ctx->bitstream_buffers_used = 0;
  254. pic_ctx->bitstream_buffers = NULL;
  255. return 0;
  256. }
  257. int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
  258. struct vdpau_picture_context *pic_ctx)
  259. {
  260. VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
  261. VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
  262. VdpStatus status;
  263. int val;
  264. val = ff_vdpau_common_reinit(avctx);
  265. if (val < 0)
  266. return val;
  267. status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
  268. pic_ctx->bitstream_buffers_used,
  269. pic_ctx->bitstream_buffers);
  270. av_freep(&pic_ctx->bitstream_buffers);
  271. return vdpau_error(status);
  272. }
  273. #if CONFIG_MPEG1_VDPAU_HWACCEL || \
  274. CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
  275. CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
  276. int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
  277. {
  278. MpegEncContext *s = avctx->priv_data;
  279. Picture *pic = s->current_picture_ptr;
  280. struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
  281. int val;
  282. val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
  283. if (val < 0)
  284. return val;
  285. ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
  286. return 0;
  287. }
  288. #endif
  289. int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
  290. const uint8_t *buf, uint32_t size)
  291. {
  292. VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;
  293. buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
  294. (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
  295. if (!buffers)
  296. return AVERROR(ENOMEM);
  297. pic_ctx->bitstream_buffers = buffers;
  298. buffers += pic_ctx->bitstream_buffers_used++;
  299. buffers->struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  300. buffers->bitstream = buf;
  301. buffers->bitstream_bytes = size;
  302. return 0;
  303. }
  304. #if FF_API_VDPAU_PROFILE
  305. int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
  306. {
  307. #define PROFILE(prof) \
  308. do { \
  309. *profile = VDP_DECODER_PROFILE_##prof; \
  310. return 0; \
  311. } while (0)
  312. switch (avctx->codec_id) {
  313. case AV_CODEC_ID_MPEG1VIDEO: PROFILE(MPEG1);
  314. case AV_CODEC_ID_MPEG2VIDEO:
  315. switch (avctx->profile) {
  316. case FF_PROFILE_MPEG2_MAIN: PROFILE(MPEG2_MAIN);
  317. case FF_PROFILE_MPEG2_SIMPLE: PROFILE(MPEG2_SIMPLE);
  318. default: return AVERROR(EINVAL);
  319. }
  320. case AV_CODEC_ID_H263: PROFILE(MPEG4_PART2_ASP);
  321. case AV_CODEC_ID_MPEG4:
  322. switch (avctx->profile) {
  323. case FF_PROFILE_MPEG4_SIMPLE: PROFILE(MPEG4_PART2_SP);
  324. case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
  325. default: return AVERROR(EINVAL);
  326. }
  327. case AV_CODEC_ID_H264:
  328. switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
  329. case FF_PROFILE_H264_BASELINE: PROFILE(H264_BASELINE);
  330. case FF_PROFILE_H264_CONSTRAINED_BASELINE:
  331. case FF_PROFILE_H264_MAIN: PROFILE(H264_MAIN);
  332. case FF_PROFILE_H264_HIGH: PROFILE(H264_HIGH);
  333. #ifdef VDP_DECODER_PROFILE_H264_EXTENDED
  334. case FF_PROFILE_H264_EXTENDED: PROFILE(H264_EXTENDED);
  335. #endif
  336. default: return AVERROR(EINVAL);
  337. }
  338. case AV_CODEC_ID_WMV3:
  339. case AV_CODEC_ID_VC1:
  340. switch (avctx->profile) {
  341. case FF_PROFILE_VC1_SIMPLE: PROFILE(VC1_SIMPLE);
  342. case FF_PROFILE_VC1_MAIN: PROFILE(VC1_MAIN);
  343. case FF_PROFILE_VC1_ADVANCED: PROFILE(VC1_ADVANCED);
  344. default: return AVERROR(EINVAL);
  345. }
  346. }
  347. return AVERROR(EINVAL);
  348. #undef PROFILE
  349. }
  350. #endif /* FF_API_VDPAU_PROFILE */
  351. AVVDPAUContext *av_vdpau_alloc_context(void)
  352. {
  353. return av_mallocz(sizeof(AVVDPAUContext));
  354. }
  355. int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
  356. VdpGetProcAddress *get_proc, unsigned flags)
  357. {
  358. VDPAUHWContext *hwctx;
  359. if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
  360. return AVERROR(EINVAL);
  361. if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
  362. return AVERROR(ENOMEM);
  363. hwctx = avctx->hwaccel_context;
  364. memset(hwctx, 0, sizeof(*hwctx));
  365. hwctx->context.decoder = VDP_INVALID_HANDLE;
  366. hwctx->device = device;
  367. hwctx->get_proc_address = get_proc;
  368. hwctx->flags = flags;
  369. hwctx->reset = 1;
  370. return 0;
  371. }
  372. /* @}*/