You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

479 lines
15KB

  1. /*
  2. * This file is part of Libav.
  3. *
  4. * Libav is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * Libav is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with Libav; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include "config.h"
  19. #include <stdint.h>
  20. #include <string.h>
  21. #include <vdpau/vdpau.h>
  22. #include "buffer.h"
  23. #include "common.h"
  24. #include "hwcontext.h"
  25. #include "hwcontext_internal.h"
  26. #include "hwcontext_vdpau.h"
  27. #include "mem.h"
  28. #include "pixfmt.h"
  29. #include "pixdesc.h"
  30. typedef struct VDPAUDeviceContext {
  31. VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
  32. VdpVideoSurfaceGetBitsYCbCr *get_data;
  33. VdpVideoSurfacePutBitsYCbCr *put_data;
  34. VdpVideoSurfaceCreate *surf_create;
  35. VdpVideoSurfaceDestroy *surf_destroy;
  36. enum AVPixelFormat *pix_fmts[3];
  37. int nb_pix_fmts[3];
  38. } VDPAUDeviceContext;
  39. typedef struct VDPAUFramesContext {
  40. VdpVideoSurfaceGetBitsYCbCr *get_data;
  41. VdpVideoSurfacePutBitsYCbCr *put_data;
  42. VdpChromaType chroma_type;
  43. int chroma_idx;
  44. const enum AVPixelFormat *pix_fmts;
  45. int nb_pix_fmts;
  46. } VDPAUFramesContext;
  47. typedef struct VDPAUPixFmtMap {
  48. VdpYCbCrFormat vdpau_fmt;
  49. enum AVPixelFormat pix_fmt;
  50. } VDPAUPixFmtMap;
  51. static const VDPAUPixFmtMap pix_fmts_420[] = {
  52. { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
  53. { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
  54. { 0, AV_PIX_FMT_NONE, },
  55. };
  56. static const VDPAUPixFmtMap pix_fmts_422[] = {
  57. { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16 },
  58. { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
  59. { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
  60. { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
  61. { 0, AV_PIX_FMT_NONE, },
  62. };
  63. static const VDPAUPixFmtMap pix_fmts_444[] = {
  64. { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
  65. { 0, AV_PIX_FMT_NONE, },
  66. };
  67. static const struct {
  68. VdpChromaType chroma_type;
  69. const VDPAUPixFmtMap *map;
  70. } vdpau_pix_fmts[] = {
  71. { VDP_CHROMA_TYPE_420, pix_fmts_420 },
  72. { VDP_CHROMA_TYPE_422, pix_fmts_422 },
  73. { VDP_CHROMA_TYPE_444, pix_fmts_444 },
  74. };
  75. static int count_pixfmts(const VDPAUPixFmtMap *map)
  76. {
  77. int count = 0;
  78. while (map->pix_fmt != AV_PIX_FMT_NONE) {
  79. map++;
  80. count++;
  81. }
  82. return count;
  83. }
  84. static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
  85. {
  86. AVVDPAUDeviceContext *hwctx = ctx->hwctx;
  87. VDPAUDeviceContext *priv = ctx->internal->priv;
  88. int i;
  89. for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
  90. const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
  91. int nb_pix_fmts;
  92. nb_pix_fmts = count_pixfmts(map);
  93. priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
  94. if (!priv->pix_fmts[i])
  95. return AVERROR(ENOMEM);
  96. nb_pix_fmts = 0;
  97. while (map->pix_fmt != AV_PIX_FMT_NONE) {
  98. VdpBool supported;
  99. VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
  100. map->vdpau_fmt, &supported);
  101. if (err == VDP_STATUS_OK && supported)
  102. priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
  103. map++;
  104. }
  105. priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
  106. priv->nb_pix_fmts[i] = nb_pix_fmts;
  107. }
  108. return 0;
  109. }
  110. #define GET_CALLBACK(id, result) \
  111. do { \
  112. void *tmp; \
  113. err = hwctx->get_proc_address(hwctx->device, id, &tmp); \
  114. if (err != VDP_STATUS_OK) { \
  115. av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
  116. return AVERROR_UNKNOWN; \
  117. } \
  118. result = tmp; \
  119. } while (0)
  120. static int vdpau_device_init(AVHWDeviceContext *ctx)
  121. {
  122. AVVDPAUDeviceContext *hwctx = ctx->hwctx;
  123. VDPAUDeviceContext *priv = ctx->internal->priv;
  124. VdpStatus err;
  125. int ret;
  126. GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
  127. priv->get_transfer_caps);
  128. GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
  129. GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
  130. GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, priv->surf_create);
  131. GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, priv->surf_destroy);
  132. ret = vdpau_init_pixmfts(ctx);
  133. if (ret < 0) {
  134. av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
  135. return ret;
  136. }
  137. return 0;
  138. }
  139. static void vdpau_device_uninit(AVHWDeviceContext *ctx)
  140. {
  141. VDPAUDeviceContext *priv = ctx->internal->priv;
  142. int i;
  143. for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
  144. av_freep(&priv->pix_fmts[i]);
  145. }
  146. static void vdpau_buffer_free(void *opaque, uint8_t *data)
  147. {
  148. AVHWFramesContext *ctx = opaque;
  149. VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  150. VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;
  151. device_priv->surf_destroy(surf);
  152. }
  153. static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
  154. {
  155. AVHWFramesContext *ctx = opaque;
  156. VDPAUFramesContext *priv = ctx->internal->priv;
  157. AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
  158. VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  159. AVBufferRef *ret;
  160. VdpVideoSurface surf;
  161. VdpStatus err;
  162. err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
  163. ctx->width, ctx->height, &surf);
  164. if (err != VDP_STATUS_OK) {
  165. av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
  166. return NULL;
  167. }
  168. ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
  169. vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
  170. if (!ret) {
  171. device_priv->surf_destroy(surf);
  172. return NULL;
  173. }
  174. return ret;
  175. }
  176. static int vdpau_frames_init(AVHWFramesContext *ctx)
  177. {
  178. VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  179. VDPAUFramesContext *priv = ctx->internal->priv;
  180. int i;
  181. switch (ctx->sw_format) {
  182. case AV_PIX_FMT_YUV420P: priv->chroma_type = VDP_CHROMA_TYPE_420; break;
  183. case AV_PIX_FMT_YUV422P: priv->chroma_type = VDP_CHROMA_TYPE_422; break;
  184. case AV_PIX_FMT_YUV444P: priv->chroma_type = VDP_CHROMA_TYPE_444; break;
  185. default:
  186. av_log(ctx, AV_LOG_ERROR, "Unsupported data layout: %s\n",
  187. av_get_pix_fmt_name(ctx->sw_format));
  188. return AVERROR(ENOSYS);
  189. }
  190. for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
  191. if (vdpau_pix_fmts[i].chroma_type == priv->chroma_type) {
  192. priv->chroma_idx = i;
  193. priv->pix_fmts = device_priv->pix_fmts[i];
  194. priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
  195. break;
  196. }
  197. }
  198. if (!priv->pix_fmts) {
  199. av_log(ctx, AV_LOG_ERROR, "Unsupported chroma type: %d\n", priv->chroma_type);
  200. return AVERROR(ENOSYS);
  201. }
  202. if (!ctx->pool) {
  203. ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
  204. vdpau_pool_alloc, NULL);
  205. if (!ctx->internal->pool_internal)
  206. return AVERROR(ENOMEM);
  207. }
  208. priv->get_data = device_priv->get_data;
  209. priv->put_data = device_priv->put_data;
  210. return 0;
  211. }
  212. static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  213. {
  214. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  215. if (!frame->buf[0])
  216. return AVERROR(ENOMEM);
  217. frame->data[3] = frame->buf[0]->data;
  218. frame->format = AV_PIX_FMT_VDPAU;
  219. frame->width = ctx->width;
  220. frame->height = ctx->height;
  221. return 0;
  222. }
  223. static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
  224. enum AVHWFrameTransferDirection dir,
  225. enum AVPixelFormat **formats)
  226. {
  227. VDPAUFramesContext *priv = ctx->internal->priv;
  228. enum AVPixelFormat *fmts;
  229. if (priv->nb_pix_fmts == 1) {
  230. av_log(ctx, AV_LOG_ERROR,
  231. "No target formats are supported for this chroma type\n");
  232. return AVERROR(ENOSYS);
  233. }
  234. fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
  235. if (!fmts)
  236. return AVERROR(ENOMEM);
  237. memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
  238. *formats = fmts;
  239. return 0;
  240. }
  241. static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
  242. const AVFrame *src)
  243. {
  244. VDPAUFramesContext *priv = ctx->internal->priv;
  245. VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];
  246. void *data[3];
  247. uint32_t linesize[3];
  248. const VDPAUPixFmtMap *map;
  249. VdpYCbCrFormat vdpau_format;
  250. VdpStatus err;
  251. int i;
  252. for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
  253. data[i] = dst->data[i];
  254. if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
  255. av_log(ctx, AV_LOG_ERROR,
  256. "The linesize %d cannot be represented as uint32\n",
  257. dst->linesize[i]);
  258. return AVERROR(ERANGE);
  259. }
  260. linesize[i] = dst->linesize[i];
  261. }
  262. map = vdpau_pix_fmts[priv->chroma_idx].map;
  263. for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
  264. if (map[i].pix_fmt == dst->format) {
  265. vdpau_format = map[i].vdpau_fmt;
  266. break;
  267. }
  268. }
  269. if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
  270. av_log(ctx, AV_LOG_ERROR,
  271. "Unsupported target pixel format: %s\n",
  272. av_get_pix_fmt_name(dst->format));
  273. return AVERROR(EINVAL);
  274. }
  275. if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
  276. FFSWAP(void*, data[1], data[2]);
  277. err = priv->get_data(surf, vdpau_format, data, linesize);
  278. if (err != VDP_STATUS_OK) {
  279. av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
  280. return AVERROR_UNKNOWN;
  281. }
  282. return 0;
  283. }
  284. static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  285. const AVFrame *src)
  286. {
  287. VDPAUFramesContext *priv = ctx->internal->priv;
  288. VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
  289. const void *data[3];
  290. uint32_t linesize[3];
  291. const VDPAUPixFmtMap *map;
  292. VdpYCbCrFormat vdpau_format;
  293. VdpStatus err;
  294. int i;
  295. for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
  296. data[i] = src->data[i];
  297. if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
  298. av_log(ctx, AV_LOG_ERROR,
  299. "The linesize %d cannot be represented as uint32\n",
  300. src->linesize[i]);
  301. return AVERROR(ERANGE);
  302. }
  303. linesize[i] = src->linesize[i];
  304. }
  305. map = vdpau_pix_fmts[priv->chroma_idx].map;
  306. for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
  307. if (map[i].pix_fmt == src->format) {
  308. vdpau_format = map[i].vdpau_fmt;
  309. break;
  310. }
  311. }
  312. if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
  313. av_log(ctx, AV_LOG_ERROR,
  314. "Unsupported source pixel format: %s\n",
  315. av_get_pix_fmt_name(src->format));
  316. return AVERROR(EINVAL);
  317. }
  318. if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
  319. FFSWAP(const void*, data[1], data[2]);
  320. err = priv->put_data(surf, vdpau_format, data, linesize);
  321. if (err != VDP_STATUS_OK) {
  322. av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
  323. return AVERROR_UNKNOWN;
  324. }
  325. return 0;
  326. }
  327. #if HAVE_VDPAU_X11
  328. #include <vdpau/vdpau_x11.h>
  329. #include <X11/Xlib.h>
  330. typedef struct VDPAUDevicePriv {
  331. VdpDeviceDestroy *device_destroy;
  332. Display *dpy;
  333. } VDPAUDevicePriv;
  334. static void vdpau_device_free(AVHWDeviceContext *ctx)
  335. {
  336. AVVDPAUDeviceContext *hwctx = ctx->hwctx;
  337. VDPAUDevicePriv *priv = ctx->user_opaque;
  338. if (priv->device_destroy)
  339. priv->device_destroy(hwctx->device);
  340. if (priv->dpy)
  341. XCloseDisplay(priv->dpy);
  342. av_freep(&priv);
  343. }
  344. static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
  345. AVDictionary *opts, int flags)
  346. {
  347. AVVDPAUDeviceContext *hwctx = ctx->hwctx;
  348. VDPAUDevicePriv *priv;
  349. VdpStatus err;
  350. VdpGetInformationString *get_information_string;
  351. const char *display, *vendor;
  352. priv = av_mallocz(sizeof(*priv));
  353. if (!priv)
  354. return AVERROR(ENOMEM);
  355. ctx->user_opaque = priv;
  356. ctx->free = vdpau_device_free;
  357. priv->dpy = XOpenDisplay(device);
  358. if (!priv->dpy) {
  359. av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
  360. XDisplayName(device));
  361. return AVERROR_UNKNOWN;
  362. }
  363. display = XDisplayString(priv->dpy);
  364. err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
  365. &hwctx->device, &hwctx->get_proc_address);
  366. if (err != VDP_STATUS_OK) {
  367. av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
  368. display);
  369. return AVERROR_UNKNOWN;
  370. }
  371. GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
  372. GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);
  373. get_information_string(&vendor);
  374. av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
  375. "X11 display %s\n", vendor, display);
  376. return 0;
  377. }
  378. #endif
  379. const HWContextType ff_hwcontext_type_vdpau = {
  380. .type = AV_HWDEVICE_TYPE_VDPAU,
  381. .name = "VDPAU",
  382. .device_hwctx_size = sizeof(AVVDPAUDeviceContext),
  383. .device_priv_size = sizeof(VDPAUDeviceContext),
  384. .frames_priv_size = sizeof(VDPAUFramesContext),
  385. #if HAVE_VDPAU_X11
  386. .device_create = vdpau_device_create,
  387. #endif
  388. .device_init = vdpau_device_init,
  389. .device_uninit = vdpau_device_uninit,
  390. .frames_init = vdpau_frames_init,
  391. .frames_get_buffer = vdpau_get_buffer,
  392. .transfer_get_formats = vdpau_transfer_get_formats,
  393. .transfer_data_to = vdpau_transfer_data_to,
  394. .transfer_data_from = vdpau_transfer_data_from,
  395. .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
  396. };