You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

529 lines
17KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include "config.h"
  19. #include <stdint.h>
  20. #include <string.h>
  21. #include <vdpau/vdpau.h>
  22. #include "buffer.h"
  23. #include "common.h"
  24. #include "hwcontext.h"
  25. #include "hwcontext_internal.h"
  26. #include "hwcontext_vdpau.h"
  27. #include "mem.h"
  28. #include "pixfmt.h"
  29. #include "pixdesc.h"
/* Mapping between a VDPAU get/put-bits YCbCr format and the FFmpeg pixel
 * format used for it in system memory. */
typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat     vdpau_fmt; /* VDPAU transfer (get/put bits) format */
    enum AVPixelFormat pix_fmt;   /* equivalent FFmpeg pixel format       */
} VDPAUPixFmtMap;
/* Transfer formats probed for 4:2:0 chroma types; the list is terminated by
 * an AV_PIX_FMT_NONE entry. */
static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
#ifdef VDP_YCBCR_FORMAT_P016
    /* 16-bit semi-planar formats; only available with newer VDPAU headers. */
    { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016 },
    { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010 },
#endif
    { 0, AV_PIX_FMT_NONE, },
};
/* Transfer formats probed for 4:2:2 chroma types; terminated by
 * AV_PIX_FMT_NONE. */
static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0, AV_PIX_FMT_NONE, },
};
/* Transfer formats probed for 4:4:4 chroma types; terminated by
 * AV_PIX_FMT_NONE. */
static const VDPAUPixFmtMap pix_fmts_444[] = {
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
    { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
#endif
#ifdef VDP_YCBCR_FORMAT_P016
    /* NOTE(review): guarded by VDP_YCBCR_FORMAT_P016 rather than by its own
     * name — presumably both macros were introduced in the same VDPAU header
     * revision; confirm against the vdpau headers in use. */
    {VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16},
#endif
    { 0, AV_PIX_FMT_NONE, },
};
/* Per-chroma-type configuration: the VdpChromaType used for surface
 * allocation, the sw_format it is matched against / advertised as, and the
 * table of transfer formats to probe.  A chroma type may appear in several
 * rows (e.g. VDP_CHROMA_TYPE_420_16 for both 10- and 12-bit sw formats);
 * rows are matched by frames_sw_format in vdpau_frames_init(). */
static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
#endif
};
/* Device-level private data: VDPAU entry points resolved in
 * vdpau_device_init(), plus per-vdpau_pix_fmts-row lists of the pixel
 * formats the driver can actually transfer. */
typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpVideoSurfaceCreate *surf_create;
    VdpVideoSurfaceDestroy *surf_destroy;

    /* pix_fmts[i] is an AV_PIX_FMT_NONE-terminated list built by
     * vdpau_init_pixmfts() for vdpau_pix_fmts[i]; nb_pix_fmts[i] counts the
     * entries including the terminator. */
    enum AVPixelFormat *pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
    int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
} VDPAUDeviceContext;
/* Frames-context private data: transfer callbacks and the chroma-type
 * selection cached from the device context in vdpau_frames_init(). */
typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;      /* chroma type used for surface creation   */
    int chroma_idx;                 /* index of the matched vdpau_pix_fmts row */

    /* borrowed from VDPAUDeviceContext; not owned by this struct */
    const enum AVPixelFormat *pix_fmts;
    int nb_pix_fmts;                /* including the AV_PIX_FMT_NONE terminator */
} VDPAUFramesContext;
  92. static int count_pixfmts(const VDPAUPixFmtMap *map)
  93. {
  94. int count = 0;
  95. while (map->pix_fmt != AV_PIX_FMT_NONE) {
  96. map++;
  97. count++;
  98. }
  99. return count;
  100. }
/**
 * For each row of vdpau_pix_fmts, query the driver and build the list of
 * pixel formats that can actually be transferred to/from system memory.
 *
 * Each list is allocated into priv->pix_fmts[i] and terminated with
 * AV_PIX_FMT_NONE; priv->nb_pix_fmts[i] includes the terminator, so a value
 * of 1 means "no transfer format supported" for that row.
 *
 * Returns 0 on success or AVERROR(ENOMEM) on allocation failure.  Lists
 * allocated before a failure are released by vdpau_device_uninit().
 */
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        /* Allocate for the worst case: every mapped format supported,
         * plus the terminator. */
        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        /* Keep only the formats the driver reports as transferable for this
         * chroma type; query errors simply drop the format. */
        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i] = nb_pix_fmts;
    }

    return 0;
}
/* Resolve the VDPAU entry point `id` through hwctx->get_proc_address() and
 * store it in `result`.  Requires `ctx`, `hwctx` and a VdpStatus `err` in
 * scope; on failure it logs and returns AVERROR_UNKNOWN from the enclosing
 * function. */
#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)
/**
 * Device init hook: resolve the video-surface entry points used by this
 * backend and probe the supported transfer formats.
 *
 * Returns 0 on success, AVERROR_UNKNOWN if a callback cannot be resolved,
 * or the error from vdpau_init_pixmfts().
 */
static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext *priv = ctx->internal->priv;
    VdpStatus err;
    int ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}
  156. static void vdpau_device_uninit(AVHWDeviceContext *ctx)
  157. {
  158. VDPAUDeviceContext *priv = ctx->internal->priv;
  159. int i;
  160. for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
  161. av_freep(&priv->pix_fmts[i]);
  162. }
  163. static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
  164. const void *hwconfig,
  165. AVHWFramesConstraints *constraints)
  166. {
  167. VDPAUDeviceContext *priv = ctx->internal->priv;
  168. int nb_sw_formats = 0;
  169. int i;
  170. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
  171. sizeof(*constraints->valid_sw_formats));
  172. if (!constraints->valid_sw_formats)
  173. return AVERROR(ENOMEM);
  174. for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
  175. if (priv->nb_pix_fmts[i] > 1)
  176. constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
  177. }
  178. constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;
  179. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  180. if (!constraints->valid_hw_formats)
  181. return AVERROR(ENOMEM);
  182. constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
  183. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  184. return 0;
  185. }
/* AVBuffer free callback for pooled surfaces.  The buffer's data pointer is
 * the VdpVideoSurface handle itself (not a real allocation), so freeing
 * means destroying the surface. */
static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext *ctx = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}
/**
 * Buffer-pool allocator: create a VDPAU video surface matching the frames
 * context's size and chroma type, and wrap its handle in an AVBufferRef.
 * The surface handle is stored directly as the buffer's data pointer (no
 * CPU memory is allocated); vdpau_buffer_free() destroys the surface.
 *
 * Returns the new buffer, or NULL on failure (surface creation or
 * wrapping), in which case nothing is leaked.
 */
static AVBufferRef *vdpau_pool_alloc(void *opaque, buffer_size_t size)
{
    AVHWFramesContext *ctx = opaque;
    VDPAUFramesContext *priv = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        /* Wrapping failed: destroy the surface ourselves since the free
         * callback will never run. */
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}
/**
 * Frames init hook: select the vdpau_pix_fmts row matching ctx->sw_format,
 * cache the transfer-format list and callbacks from the device context, and
 * create an internal surface pool if the caller did not provide one.
 *
 * Returns AVERROR(ENOSYS) for an unsupported sw_format: nb_pix_fmts stays 0
 * when no row matched (the priv struct starts zeroed) and is 1 when a row
 * matched but only the AV_PIX_FMT_NONE terminator survived probing, so any
 * value < 2 means no usable transfer format.
 */
static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext *priv = ctx->internal->priv;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        /* Pool entries carry the VdpVideoSurface handle, hence this size. */
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}
  245. static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  246. {
  247. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  248. if (!frame->buf[0])
  249. return AVERROR(ENOMEM);
  250. frame->data[3] = frame->buf[0]->data;
  251. frame->format = AV_PIX_FMT_VDPAU;
  252. frame->width = ctx->width;
  253. frame->height = ctx->height;
  254. return 0;
  255. }
  256. static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
  257. enum AVHWFrameTransferDirection dir,
  258. enum AVPixelFormat **formats)
  259. {
  260. VDPAUFramesContext *priv = ctx->internal->priv;
  261. enum AVPixelFormat *fmts;
  262. if (priv->nb_pix_fmts == 1) {
  263. av_log(ctx, AV_LOG_ERROR,
  264. "No target formats are supported for this chroma type\n");
  265. return AVERROR(ENOSYS);
  266. }
  267. fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
  268. if (!fmts)
  269. return AVERROR(ENOMEM);
  270. memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
  271. *formats = fmts;
  272. return 0;
  273. }
/**
 * Download the contents of the VDPAU surface backing src into the
 * system-memory frame dst.  dst->format must be one of the transfer formats
 * mapped for this frames context's chroma type.
 *
 * Returns 0 on success, AVERROR(ERANGE)/AVERROR(EINVAL) on invalid
 * parameters, or AVERROR_UNKNOWN if the VDPAU call fails.
 */
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    /* The surface handle is carried in data[3] of VDPAU frames. */
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    /* Gather destination plane pointers and linesizes, converting the
     * int linesizes to the uint32_t the VDPAU API expects. */
    for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    /* Translate dst->format into the matching VDPAU YCbCr format. */
    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    /* For these formats the code assumes VDPAU orders the chroma planes
     * opposite to the mapped FFmpeg formats, so the U/V plane pointers are
     * swapped before the call. */
    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
#ifdef VDP_YCBCR_FORMAT_P016
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
#endif
        )
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
  324. static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  325. const AVFrame *src)
  326. {
  327. VDPAUFramesContext *priv = ctx->internal->priv;
  328. VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
  329. const void *data[3];
  330. uint32_t linesize[3];
  331. const VDPAUPixFmtMap *map;
  332. VdpYCbCrFormat vdpau_format;
  333. VdpStatus err;
  334. int i;
  335. for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
  336. data[i] = src->data[i];
  337. if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
  338. av_log(ctx, AV_LOG_ERROR,
  339. "The linesize %d cannot be represented as uint32\n",
  340. src->linesize[i]);
  341. return AVERROR(ERANGE);
  342. }
  343. linesize[i] = src->linesize[i];
  344. }
  345. map = vdpau_pix_fmts[priv->chroma_idx].map;
  346. for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
  347. if (map[i].pix_fmt == src->format) {
  348. vdpau_format = map[i].vdpau_fmt;
  349. break;
  350. }
  351. }
  352. if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
  353. av_log(ctx, AV_LOG_ERROR,
  354. "Unsupported source pixel format: %s\n",
  355. av_get_pix_fmt_name(src->format));
  356. return AVERROR(EINVAL);
  357. }
  358. if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
  359. #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
  360. || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
  361. #endif
  362. )
  363. FFSWAP(const void*, data[1], data[2]);
  364. err = priv->put_data(surf, vdpau_format, data, linesize);
  365. if (err != VDP_STATUS_OK) {
  366. av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
  367. return AVERROR_UNKNOWN;
  368. }
  369. return 0;
  370. }
  371. #if HAVE_VDPAU_X11
  372. #include <vdpau/vdpau_x11.h>
  373. #include <X11/Xlib.h>
/* State owned by vdpau_device_create() and released by vdpau_device_free():
 * the device-destroy entry point and the X11 display connection. */
typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy; /* NULL until resolved via GET_CALLBACK */
    Display *dpy;                     /* NULL until XOpenDisplay() succeeds   */
} VDPAUDevicePriv;
/* ctx->free callback: destroy the VDPAU device and close the X11 display.
 * Safe after a partially failed vdpau_device_create(): priv is zero-filled
 * at allocation, so unset members are NULL and skipped. */
static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}
/**
 * Create a VDPAU device on an X11 display.
 *
 * @param device X11 display name passed to XOpenDisplay() (NULL selects the
 *               default display).
 *
 * On success hwctx->device and hwctx->get_proc_address are filled in.
 * ctx->free is set to vdpau_device_free() before any fallible step, so the
 * generic code cleans up partially initialized state on all failure paths.
 */
static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);

    /* NOTE(review): the VdpStatus returned by get_information_string() is
     * not checked; vendor only feeds the verbose log line below. */
    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
  422. #endif
/* Hardware-context backend descriptor registering the VDPAU callbacks with
 * the generic hwcontext code.  device_create is only available when built
 * with X11 support. */
const HWContextType ff_hwcontext_type_vdpau = {
    .type                 = AV_HWDEVICE_TYPE_VDPAU,
    .name                 = "VDPAU",

    .device_hwctx_size    = sizeof(AVVDPAUDeviceContext),
    .device_priv_size     = sizeof(VDPAUDeviceContext),
    .frames_priv_size     = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create        = vdpau_device_create,
#endif
    .device_init          = vdpau_device_init,
    .device_uninit        = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init          = vdpau_frames_init,
    .frames_get_buffer    = vdpau_get_buffer,
    .transfer_get_formats = vdpau_transfer_get_formats,
    .transfer_data_to     = vdpau_transfer_data_to,
    .transfer_data_from   = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};