/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#if HAVE_VAAPI_X11
#   include <va/va_x11.h>
#endif
#if HAVE_VAAPI_DRM
#   include <va/va_drm.h>
#endif

#if CONFIG_LIBDRM
#   include <va/va_drmcommon.h>
#   include <drm_fourcc.h>
#endif

#include <fcntl.h>
#if HAVE_UNISTD_H
#   include <unistd.h>
#endif

#include "avassert.h"
#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vaapi.h"
#include "mem.h"
#include "pixdesc.h"
#include "pixfmt.h"

#if CONFIG_LIBDRM
#   include "hwcontext_drm.h"
#endif

typedef struct VAAPIDevicePriv {
#if HAVE_VAAPI_X11
    Display *x11_display;
#endif

    int drm_fd;
} VAAPIDevicePriv;

typedef struct VAAPISurfaceFormat {
    enum AVPixelFormat pix_fmt;
    VAImageFormat image_format;
} VAAPISurfaceFormat;

typedef struct VAAPIDeviceContext {
    // Surface formats which can be used with this device.
    VAAPISurfaceFormat *formats;
    int nb_formats;
} VAAPIDeviceContext;

typedef struct VAAPIFramesContext {
    // Surface attributes set at create time.
    VASurfaceAttrib *attributes;
    int nb_attributes;
    // RT format of the underlying surface (Intel driver ignores this anyway).
    unsigned int rt_format;
    // Whether vaDeriveImage works.
    int derive_works;
} VAAPIFramesContext;

typedef struct VAAPIMapping {
    // Handle to the derived or copied image which is mapped.
    VAImage image;
    // The mapping flags actually used.
    int flags;
} VAAPIMapping;

#define MAP(va, rt, av) { \
        VA_FOURCC_ ## va, \
        VA_RT_FORMAT_ ## rt, \
        AV_PIX_FMT_ ## av \
    }
// The map fourcc <-> pix_fmt isn't bijective because of the annoying U/V
// plane swap cases.  The frame handling below tries to hide these.
static const struct {
    unsigned int fourcc;
    unsigned int rt_format;
    enum AVPixelFormat pix_fmt;
} vaapi_format_map[] = {
    MAP(NV12, YUV420, NV12),
    MAP(YV12, YUV420, YUV420P), // With U/V planes swapped.
    MAP(IYUV, YUV420, YUV420P),
#ifdef VA_FOURCC_I420
    MAP(I420, YUV420, YUV420P),
#endif
#ifdef VA_FOURCC_YV16
    MAP(YV16, YUV422, YUV422P), // With U/V planes swapped.
#endif
    MAP(422H, YUV422, YUV422P),
    MAP(UYVY, YUV422, UYVY422),
    MAP(YUY2, YUV422, YUYV422),
    MAP(411P, YUV411, YUV411P),
    MAP(422V, YUV422, YUV440P),
    MAP(444P, YUV444, YUV444P),
    MAP(Y800, YUV400, GRAY8),
#ifdef VA_FOURCC_P010
    MAP(P010, YUV420_10BPP, P010),
#endif
    MAP(BGRA, RGB32, BGRA),
    MAP(BGRX, RGB32, BGR0),
    MAP(RGBA, RGB32, RGBA),
    MAP(RGBX, RGB32, RGB0),
#ifdef VA_FOURCC_ABGR
    MAP(ABGR, RGB32, ABGR),
    MAP(XBGR, RGB32, 0BGR),
#endif
    MAP(ARGB, RGB32, ARGB),
    MAP(XRGB, RGB32, 0RGB),
};
#undef MAP

static enum AVPixelFormat vaapi_pix_fmt_from_fourcc(unsigned int fourcc)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_format_map); i++)
        if (vaapi_format_map[i].fourcc == fourcc)
            return vaapi_format_map[i].pix_fmt;
    return AV_PIX_FMT_NONE;
}

static int vaapi_get_image_format(AVHWDeviceContext *hwdev,
                                  enum AVPixelFormat pix_fmt,
                                  VAImageFormat **image_format)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    int i;

    for (i = 0; i < ctx->nb_formats; i++) {
        if (ctx->formats[i].pix_fmt == pix_fmt) {
            if (image_format)
                *image_format = &ctx->formats[i].image_format;
            return 0;
        }
    }
    return AVERROR(EINVAL);
}

static int vaapi_frames_get_constraints(AVHWDeviceContext *hwdev,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    AVVAAPIDeviceContext *hwctx = hwdev->hwctx;
    const AVVAAPIHWConfig *config = hwconfig;
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    VASurfaceAttrib *attr_list = NULL;
    VAStatus vas;
    enum AVPixelFormat pix_fmt;
    unsigned int fourcc;
    int err, i, j, attr_count, pix_fmt_count;

    if (config &&
        !(hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES)) {
        attr_count = 0;
        vas = vaQuerySurfaceAttributes(hwctx->display, config->config_id,
                                       0, &attr_count);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwdev, AV_LOG_ERROR, "Failed to query surface attributes: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(ENOSYS);
            goto fail;
        }

        attr_list = av_malloc(attr_count * sizeof(*attr_list));
        if (!attr_list) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        vas = vaQuerySurfaceAttributes(hwctx->display, config->config_id,
                                       attr_list, &attr_count);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwdev, AV_LOG_ERROR, "Failed to query surface attributes: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(ENOSYS);
            goto fail;
        }

        pix_fmt_count = 0;
        for (i = 0; i < attr_count; i++) {
            switch (attr_list[i].type) {
            case VASurfaceAttribPixelFormat:
                fourcc = attr_list[i].value.value.i;
                pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
                if (pix_fmt != AV_PIX_FMT_NONE) {
                    ++pix_fmt_count;
                } else {
                    // Something unsupported - ignore.
                }
                break;
            case VASurfaceAttribMinWidth:
                constraints->min_width  = attr_list[i].value.value.i;
                break;
            case VASurfaceAttribMinHeight:
                constraints->min_height = attr_list[i].value.value.i;
                break;
            case VASurfaceAttribMaxWidth:
                constraints->max_width  = attr_list[i].value.value.i;
                break;
            case VASurfaceAttribMaxHeight:
                constraints->max_height = attr_list[i].value.value.i;
                break;
            }
        }

        if (pix_fmt_count == 0) {
            // Nothing usable found.  Presumably there exists something which
            // works, so leave the set null to indicate unknown.
            constraints->valid_sw_formats = NULL;
        } else {
            constraints->valid_sw_formats = av_malloc_array(pix_fmt_count + 1,
                                                            sizeof(pix_fmt));
            if (!constraints->valid_sw_formats) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

            for (i = j = 0; i < attr_count; i++) {
                if (attr_list[i].type != VASurfaceAttribPixelFormat)
                    continue;
                fourcc = attr_list[i].value.value.i;
                pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
                if (pix_fmt != AV_PIX_FMT_NONE)
                    constraints->valid_sw_formats[j++] = pix_fmt;
            }
            av_assert0(j == pix_fmt_count);
            constraints->valid_sw_formats[j] = AV_PIX_FMT_NONE;
        }
    } else {
        // No configuration supplied.
        // Return the full set of image formats known by the implementation.
        constraints->valid_sw_formats = av_malloc_array(ctx->nb_formats + 1,
                                                        sizeof(pix_fmt));
        if (!constraints->valid_sw_formats) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        for (i = 0; i < ctx->nb_formats; i++)
            constraints->valid_sw_formats[i] = ctx->formats[i].pix_fmt;
        constraints->valid_sw_formats[i] = AV_PIX_FMT_NONE;
    }

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(pix_fmt));
    if (!constraints->valid_hw_formats) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    constraints->valid_hw_formats[0] = AV_PIX_FMT_VAAPI;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    err = 0;
fail:
    av_freep(&attr_list);
    return err;
}

static const struct {
    const char *friendly_name;
    const char *match_string;
    unsigned int quirks;
} vaapi_driver_quirks_table[] = {
    {
        "Intel i965 (Quick Sync)",
        "i965",
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS,
    },
    {
        "Intel iHD",
        "ubit",
        AV_VAAPI_DRIVER_QUIRK_ATTRIB_MEMTYPE,
    },
    {
        "VDPAU wrapper",
        "Splitted-Desktop Systems VDPAU backend for VA-API",
        AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES,
    },
};
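
/*
 * Illustrative sketch (not part of the library): callers can bypass the
 * vendor-string matching against the table above by setting quirks
 * themselves before init.  A minimal example, assuming an already-open
 * VADisplay in `some_existing_va_display` (hypothetical name):
 *
 *     AVBufferRef *ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
 *     AVHWDeviceContext    *ctx   = (AVHWDeviceContext*)ref->data;
 *     AVVAAPIDeviceContext *hwctx = ctx->hwctx;
 *     hwctx->display       = some_existing_va_display;
 *     hwctx->driver_quirks = AV_VAAPI_DRIVER_QUIRK_USER_SET |
 *                            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS;
 *     av_hwdevice_ctx_init(ref);
 *
 * With AV_VAAPI_DRIVER_QUIRK_USER_SET present, vaapi_device_init() below
 * skips driver detection entirely.
 */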

static int vaapi_device_init(AVHWDeviceContext *hwdev)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwdev->hwctx;
    VAImageFormat *image_list = NULL;
    VAStatus vas;
    const char *vendor_string;
    int err, i, image_count;
    enum AVPixelFormat pix_fmt;
    unsigned int fourcc;

    image_count = vaMaxNumImageFormats(hwctx->display);
    if (image_count <= 0) {
        err = AVERROR(EIO);
        goto fail;
    }
    image_list = av_malloc(image_count * sizeof(*image_list));
    if (!image_list) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    vas = vaQueryImageFormats(hwctx->display, image_list, &image_count);
    if (vas != VA_STATUS_SUCCESS) {
        err = AVERROR(EIO);
        goto fail;
    }

    ctx->formats = av_malloc(image_count * sizeof(*ctx->formats));
    if (!ctx->formats) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->nb_formats = 0;
    for (i = 0; i < image_count; i++) {
        fourcc  = image_list[i].fourcc;
        pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
        if (pix_fmt == AV_PIX_FMT_NONE) {
            av_log(hwdev, AV_LOG_DEBUG, "Format %#x -> unknown.\n",
                   fourcc);
        } else {
            av_log(hwdev, AV_LOG_DEBUG, "Format %#x -> %s.\n",
                   fourcc, av_get_pix_fmt_name(pix_fmt));
            ctx->formats[ctx->nb_formats].pix_fmt      = pix_fmt;
            ctx->formats[ctx->nb_formats].image_format = image_list[i];
            ++ctx->nb_formats;
        }
    }

    if (hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_USER_SET) {
        av_log(hwdev, AV_LOG_VERBOSE, "Not detecting driver: "
               "quirks set by user.\n");
    } else {
        // Detect the driver in use and set quirk flags if necessary.
        vendor_string = vaQueryVendorString(hwctx->display);
        hwctx->driver_quirks = 0;
        if (vendor_string) {
            for (i = 0; i < FF_ARRAY_ELEMS(vaapi_driver_quirks_table); i++) {
                if (strstr(vendor_string,
                           vaapi_driver_quirks_table[i].match_string)) {
                    av_log(hwdev, AV_LOG_VERBOSE, "Matched \"%s\" as known "
                           "driver \"%s\".\n", vendor_string,
                           vaapi_driver_quirks_table[i].friendly_name);
                    hwctx->driver_quirks |=
                        vaapi_driver_quirks_table[i].quirks;
                    break;
                }
            }
            if (!(i < FF_ARRAY_ELEMS(vaapi_driver_quirks_table))) {
                av_log(hwdev, AV_LOG_VERBOSE, "Unknown driver \"%s\", "
                       "assuming standard behaviour.\n", vendor_string);
            }
        }
    }

    av_free(image_list);
    return 0;
fail:
    av_freep(&ctx->formats);
    av_free(image_list);
    return err;
}

static void vaapi_device_uninit(AVHWDeviceContext *hwdev)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;

    av_freep(&ctx->formats);
}

static void vaapi_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext *hwfc = opaque;
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VASurfaceID surface_id;
    VAStatus vas;

    surface_id = (VASurfaceID)(uintptr_t)data;

    vas = vaDestroySurfaces(hwctx->display, &surface_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to destroy surface %#x: "
               "%d (%s).\n", surface_id, vas, vaErrorStr(vas));
    }
}

static AVBufferRef *vaapi_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext *hwfc = opaque;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    AVVAAPIFramesContext *avfc = hwfc->hwctx;
    VASurfaceID surface_id;
    VAStatus vas;
    AVBufferRef *ref;

    if (hwfc->initial_pool_size > 0 &&
        avfc->nb_surfaces >= hwfc->initial_pool_size)
        return NULL;

    vas = vaCreateSurfaces(hwctx->display, ctx->rt_format,
                           hwfc->width, hwfc->height,
                           &surface_id, 1,
                           ctx->attributes, ctx->nb_attributes);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to create surface: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        return NULL;
    }
    av_log(hwfc, AV_LOG_DEBUG, "Created surface %#x.\n", surface_id);

    ref = av_buffer_create((uint8_t*)(uintptr_t)surface_id,
                           sizeof(surface_id), &vaapi_buffer_free,
                           hwfc, AV_BUFFER_FLAG_READONLY);
    if (!ref) {
        vaDestroySurfaces(hwctx->display, &surface_id, 1);
        return NULL;
    }

    if (hwfc->initial_pool_size > 0) {
        // This is a fixed-size pool, so we must still be in the initial
        // allocation sequence.
        av_assert0(avfc->nb_surfaces < hwfc->initial_pool_size);
        avfc->surface_ids[avfc->nb_surfaces] = surface_id;
        ++avfc->nb_surfaces;
    }

    return ref;
}

static int vaapi_frames_init(AVHWFramesContext *hwfc)
{
    AVVAAPIFramesContext *avfc = hwfc->hwctx;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VAImageFormat *expected_format;
    AVBufferRef *test_surface = NULL;
    VASurfaceID test_surface_id;
    VAImage test_image;
    VAStatus vas;
    int err, i;
    unsigned int fourcc, rt_format;

    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_format_map); i++) {
        if (vaapi_format_map[i].pix_fmt == hwfc->sw_format) {
            fourcc    = vaapi_format_map[i].fourcc;
            rt_format = vaapi_format_map[i].rt_format;
            break;
        }
    }
    if (i >= FF_ARRAY_ELEMS(vaapi_format_map)) {
        av_log(hwfc, AV_LOG_ERROR, "Unsupported format: %s.\n",
               av_get_pix_fmt_name(hwfc->sw_format));
        return AVERROR(EINVAL);
    }

    if (!hwfc->pool) {
        if (!(hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES)) {
            int need_memory_type = !(hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_ATTRIB_MEMTYPE);
            int need_pixel_format = 1;
            for (i = 0; i < avfc->nb_attributes; i++) {
                if (avfc->attributes[i].type == VASurfaceAttribMemoryType)
                    need_memory_type  = 0;
                if (avfc->attributes[i].type == VASurfaceAttribPixelFormat)
                    need_pixel_format = 0;
            }
            ctx->nb_attributes =
                avfc->nb_attributes + need_memory_type + need_pixel_format;

            ctx->attributes = av_malloc(ctx->nb_attributes *
                                        sizeof(*ctx->attributes));
            if (!ctx->attributes) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

            for (i = 0; i < avfc->nb_attributes; i++)
                ctx->attributes[i] = avfc->attributes[i];
            if (need_memory_type) {
                ctx->attributes[i++] = (VASurfaceAttrib) {
                    .type          = VASurfaceAttribMemoryType,
                    .flags         = VA_SURFACE_ATTRIB_SETTABLE,
                    .value.type    = VAGenericValueTypeInteger,
                    .value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA,
                };
            }
            if (need_pixel_format) {
                ctx->attributes[i++] = (VASurfaceAttrib) {
                    .type          = VASurfaceAttribPixelFormat,
                    .flags         = VA_SURFACE_ATTRIB_SETTABLE,
                    .value.type    = VAGenericValueTypeInteger,
                    .value.value.i = fourcc,
                };
            }
            av_assert0(i == ctx->nb_attributes);
        } else {
            ctx->attributes = NULL;
            ctx->nb_attributes = 0;
        }

        ctx->rt_format = rt_format;

        if (hwfc->initial_pool_size > 0) {
            // This pool will be usable as a render target, so we need to store
            // all of the surface IDs somewhere that vaCreateContext() calls
            // will be able to access them.
            avfc->nb_surfaces = 0;
            avfc->surface_ids = av_malloc(hwfc->initial_pool_size *
                                          sizeof(*avfc->surface_ids));
            if (!avfc->surface_ids) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        } else {
            // This pool allows dynamic sizing, and will not be usable as a
            // render target.
            avfc->nb_surfaces = 0;
            avfc->surface_ids = NULL;
        }

        hwfc->internal->pool_internal =
            av_buffer_pool_init2(sizeof(VASurfaceID), hwfc,
                                 &vaapi_pool_alloc, NULL);
        if (!hwfc->internal->pool_internal) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to create VAAPI surface pool.\n");
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    // Allocate a single surface to test whether vaDeriveImage() is going
    // to work for the specific configuration.
    if (hwfc->pool) {
        test_surface = av_buffer_pool_get(hwfc->pool);
        if (!test_surface) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to allocate a surface from "
                   "user-configured buffer pool.\n");
            err = AVERROR(ENOMEM);
            goto fail;
        }
    } else {
        test_surface = av_buffer_pool_get(hwfc->internal->pool_internal);
        if (!test_surface) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to allocate a surface from "
                   "internal buffer pool.\n");
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }
    test_surface_id = (VASurfaceID)(uintptr_t)test_surface->data;

    ctx->derive_works = 0;

    err = vaapi_get_image_format(hwfc->device_ctx,
                                 hwfc->sw_format, &expected_format);
    if (err == 0) {
        vas = vaDeriveImage(hwctx->display, test_surface_id, &test_image);
        if (vas == VA_STATUS_SUCCESS) {
            if (expected_format->fourcc == test_image.format.fourcc) {
                av_log(hwfc, AV_LOG_DEBUG, "Direct mapping possible.\n");
                ctx->derive_works = 1;
            } else {
                av_log(hwfc, AV_LOG_DEBUG, "Direct mapping disabled: "
                       "derived image format %08x does not match "
                       "expected format %08x.\n",
                       expected_format->fourcc, test_image.format.fourcc);
            }
            vaDestroyImage(hwctx->display, test_image.image_id);
        } else {
            av_log(hwfc, AV_LOG_DEBUG, "Direct mapping disabled: "
                   "deriving image does not work: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
        }
    } else {
        av_log(hwfc, AV_LOG_DEBUG, "Direct mapping disabled: "
               "image format is not supported.\n");
    }

    av_buffer_unref(&test_surface);
    return 0;

fail:
    av_buffer_unref(&test_surface);
    av_freep(&avfc->surface_ids);
    av_freep(&ctx->attributes);
    return err;
}

static void vaapi_frames_uninit(AVHWFramesContext *hwfc)
{
    AVVAAPIFramesContext *avfc = hwfc->hwctx;
    VAAPIFramesContext *ctx = hwfc->internal->priv;

    av_freep(&avfc->surface_ids);
    av_freep(&ctx->attributes);
}

static int vaapi_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(hwfc->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VAAPI;
    frame->width   = hwfc->width;
    frame->height  = hwfc->height;

    return 0;
}

static int vaapi_transfer_get_formats(AVHWFramesContext *hwfc,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VAAPIDeviceContext *ctx = hwfc->device_ctx->internal->priv;
    enum AVPixelFormat *pix_fmts;
    int i, k, sw_format_available;

    sw_format_available = 0;
    for (i = 0; i < ctx->nb_formats; i++) {
        if (ctx->formats[i].pix_fmt == hwfc->sw_format)
            sw_format_available = 1;
    }

    pix_fmts = av_malloc((ctx->nb_formats + 1) * sizeof(*pix_fmts));
    if (!pix_fmts)
        return AVERROR(ENOMEM);

    if (sw_format_available) {
        pix_fmts[0] = hwfc->sw_format;
        k = 1;
    } else {
        k = 0;
    }
    for (i = 0; i < ctx->nb_formats; i++) {
        if (ctx->formats[i].pix_fmt == hwfc->sw_format)
            continue;
        av_assert0(k < ctx->nb_formats);
        pix_fmts[k++] = ctx->formats[i].pix_fmt;
    }
    pix_fmts[k] = AV_PIX_FMT_NONE;

    *formats = pix_fmts;
    return 0;
}

static void vaapi_unmap_frame(AVHWFramesContext *hwfc,
                              HWMapDescriptor *hwmap)
{
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VAAPIMapping *map = hwmap->priv;
    VASurfaceID surface_id;
    VAStatus vas;

    surface_id = (VASurfaceID)(uintptr_t)hwmap->source->data[3];
    av_log(hwfc, AV_LOG_DEBUG, "Unmap surface %#x.\n", surface_id);

    vas = vaUnmapBuffer(hwctx->display, map->image.buf);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to unmap image from surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
    }

    if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
        !(map->flags & AV_HWFRAME_MAP_DIRECT)) {
        vas = vaPutImage(hwctx->display, surface_id, map->image.image_id,
                         0, 0, hwfc->width, hwfc->height,
                         0, 0, hwfc->width, hwfc->height);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to write image to surface "
                   "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
        }
    }

    vas = vaDestroyImage(hwctx->display, map->image.image_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to destroy image from surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
    }

    av_free(map);
}

static int vaapi_map_frame(AVHWFramesContext *hwfc,
                           AVFrame *dst, const AVFrame *src, int flags)
{
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    VASurfaceID surface_id;
    VAImageFormat *image_format;
    VAAPIMapping *map;
    VAStatus vas;
    void *address = NULL;
    int err, i;

    surface_id = (VASurfaceID)(uintptr_t)src->data[3];
    av_log(hwfc, AV_LOG_DEBUG, "Map surface %#x.\n", surface_id);

    if (!ctx->derive_works && (flags & AV_HWFRAME_MAP_DIRECT)) {
        // Requested direct mapping but it is not possible.
        return AVERROR(EINVAL);
    }
    if (dst->format == AV_PIX_FMT_NONE)
        dst->format = hwfc->sw_format;
    if (dst->format != hwfc->sw_format && (flags & AV_HWFRAME_MAP_DIRECT)) {
        // Requested direct mapping but the formats do not match.
        return AVERROR(EINVAL);
    }

    err = vaapi_get_image_format(hwfc->device_ctx, dst->format, &image_format);
    if (err < 0) {
        // Requested format is not a valid output format.
        return AVERROR(EINVAL);
    }

    map = av_malloc(sizeof(*map));
    if (!map)
        return AVERROR(ENOMEM);
    map->flags = flags;
    map->image.image_id = VA_INVALID_ID;

    vas = vaSyncSurface(hwctx->display, surface_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to sync surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    // The memory which we map using derive need not be connected to the CPU
    // in a way conducive to fast access.  On Gen7-Gen9 Intel graphics, the
    // memory is mappable but not cached, so normal memcpy()-like access is
    // very slow to read it (but writing is ok).  It is possible to read much
    // faster with a copy routine which is aware of the limitation, but we
    // assume for now that the user is not aware of that and would therefore
    // prefer not to be given direct-mapped memory if they request read access.
    if (ctx->derive_works && dst->format == hwfc->sw_format &&
        ((flags & AV_HWFRAME_MAP_DIRECT) || !(flags & AV_HWFRAME_MAP_READ))) {
        vas = vaDeriveImage(hwctx->display, surface_id, &map->image);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to derive image from "
                   "surface %#x: %d (%s).\n",
                   surface_id, vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }
        if (map->image.format.fourcc != image_format->fourcc) {
            av_log(hwfc, AV_LOG_ERROR, "Derive image of surface %#x "
                   "is in wrong format: expected %#08x, got %#08x.\n",
                   surface_id, image_format->fourcc, map->image.format.fourcc);
            err = AVERROR(EIO);
            goto fail;
        }
        map->flags |= AV_HWFRAME_MAP_DIRECT;
    } else {
        vas = vaCreateImage(hwctx->display, image_format,
                            hwfc->width, hwfc->height, &map->image);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to create image for "
                   "surface %#x: %d (%s).\n",
                   surface_id, vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }
        if (!(flags & AV_HWFRAME_MAP_OVERWRITE)) {
            vas = vaGetImage(hwctx->display, surface_id, 0, 0,
                             hwfc->width, hwfc->height, map->image.image_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Failed to read image from "
                       "surface %#x: %d (%s).\n",
                       surface_id, vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
        }
    }

    vas = vaMapBuffer(hwctx->display, map->image.buf, &address);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to map image from surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = ff_hwframe_map_create(src->hw_frames_ctx,
                                dst, src, &vaapi_unmap_frame, map);
    if (err < 0)
        goto fail;

    dst->width  = src->width;
    dst->height = src->height;

    for (i = 0; i < map->image.num_planes; i++) {
        dst->data[i] = (uint8_t*)address + map->image.offsets[i];
        dst->linesize[i] = map->image.pitches[i];
    }
    if (
#ifdef VA_FOURCC_YV16
        map->image.format.fourcc == VA_FOURCC_YV16 ||
#endif
        map->image.format.fourcc == VA_FOURCC_YV12) {
        // Chroma planes are YVU rather than YUV, so swap them.
        FFSWAP(uint8_t*, dst->data[1], dst->data[2]);
    }

    return 0;

fail:
    if (map) {
        if (address)
            vaUnmapBuffer(hwctx->display, map->image.buf);
        if (map->image.image_id != VA_INVALID_ID)
            vaDestroyImage(hwctx->display, map->image.image_id);
        av_free(map);
    }
    return err;
}

static int vaapi_transfer_data_from(AVHWFramesContext *hwfc,
                                    AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;

    if (dst->width > hwfc->width || dst->height > hwfc->height)
        return AVERROR(EINVAL);

    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = dst->format;

    err = vaapi_map_frame(hwfc, map, src, AV_HWFRAME_MAP_READ);
    if (err)
        goto fail;

    map->width  = dst->width;
    map->height = dst->height;

    err = av_frame_copy(dst, map);
    if (err)
        goto fail;

    err = 0;
fail:
    av_frame_free(&map);
    return err;
}

static int vaapi_transfer_data_to(AVHWFramesContext *hwfc,
                                  AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;

    if (src->width > hwfc->width || src->height > hwfc->height)
        return AVERROR(EINVAL);

    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = src->format;

    err = vaapi_map_frame(hwfc, map, dst, AV_HWFRAME_MAP_WRITE | AV_HWFRAME_MAP_OVERWRITE);
    if (err)
        goto fail;

    map->width  = src->width;
    map->height = src->height;

    err = av_frame_copy(map, src);
    if (err)
        goto fail;

    err = 0;
fail:
    av_frame_free(&map);
    return err;
}

static int vaapi_map_to_memory(AVHWFramesContext *hwfc, AVFrame *dst,
                               const AVFrame *src, int flags)
{
    int err;

    if (dst->format != AV_PIX_FMT_NONE) {
        err = vaapi_get_image_format(hwfc->device_ctx, dst->format, NULL);
        if (err < 0)
            return AVERROR(ENOSYS);
    }

    err = vaapi_map_frame(hwfc, dst, src, flags);
    if (err)
        return err;

    err = av_frame_copy_props(dst, src);
    if (err)
        return err;

    return 0;
}
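
/*
 * Illustrative sketch (not part of the library): from the public API the
 * mapping code above is reached through av_hwframe_map() or
 * av_hwframe_transfer_data(), for example to read back a decoded surface
 * (`hw` is an AV_PIX_FMT_VAAPI frame, assumed here):
 *
 *     AVFrame *sw = av_frame_alloc();
 *     sw->format = AV_PIX_FMT_NV12;   // must be a supported image format
 *     if (av_hwframe_map(sw, hw, AV_HWFRAME_MAP_READ) < 0)
 *         av_hwframe_transfer_data(sw, hw, 0);   // fallback: copy via vaGetImage()
 *
 * As the comment in vaapi_map_frame() explains, read-mapped memory may be
 * uncached, so a transfer (copy) can be faster than direct read access.
 */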

#if CONFIG_LIBDRM

#define DRM_MAP(va, layers, ...) { \
        VA_FOURCC_ ## va, \
        layers, \
        { __VA_ARGS__ } \
    }
static const struct {
    uint32_t va_fourcc;
    int nb_layer_formats;
    uint32_t layer_formats[AV_DRM_MAX_PLANES];
} vaapi_drm_format_map[] = {
#ifdef DRM_FORMAT_R8
    DRM_MAP(NV12, 2, DRM_FORMAT_R8,  DRM_FORMAT_RG88),
#endif
    DRM_MAP(NV12, 1, DRM_FORMAT_NV12),
#if defined(VA_FOURCC_P010) && defined(DRM_FORMAT_R16)
    DRM_MAP(P010, 2, DRM_FORMAT_R16, DRM_FORMAT_RG1616),
#endif
    DRM_MAP(BGRA, 1, DRM_FORMAT_ARGB8888),
    DRM_MAP(BGRX, 1, DRM_FORMAT_XRGB8888),
    DRM_MAP(RGBA, 1, DRM_FORMAT_ABGR8888),
    DRM_MAP(RGBX, 1, DRM_FORMAT_XBGR8888),
#ifdef VA_FOURCC_ABGR
    DRM_MAP(ABGR, 1, DRM_FORMAT_RGBA8888),
    DRM_MAP(XBGR, 1, DRM_FORMAT_RGBX8888),
#endif
    DRM_MAP(ARGB, 1, DRM_FORMAT_BGRA8888),
    DRM_MAP(XRGB, 1, DRM_FORMAT_BGRX8888),
};
#undef DRM_MAP

static void vaapi_unmap_from_drm(AVHWFramesContext *dst_fc,
                                 HWMapDescriptor *hwmap)
{
    AVVAAPIDeviceContext *dst_dev = dst_fc->device_ctx->hwctx;

    VASurfaceID surface_id = (VASurfaceID)(uintptr_t)hwmap->priv;

    av_log(dst_fc, AV_LOG_DEBUG, "Destroy surface %#x.\n", surface_id);

    vaDestroySurfaces(dst_dev->display, &surface_id, 1);
}

static int vaapi_map_from_drm(AVHWFramesContext *src_fc, AVFrame *dst,
                              const AVFrame *src, int flags)
{
    AVHWFramesContext *dst_fc =
        (AVHWFramesContext*)dst->hw_frames_ctx->data;
    AVVAAPIDeviceContext *dst_dev = dst_fc->device_ctx->hwctx;
    const AVDRMFrameDescriptor *desc;
    VASurfaceID surface_id;
    VAStatus vas;
    uint32_t va_fourcc, va_rt_format;
    int err, i, j, k;

    unsigned long buffer_handle;
    VASurfaceAttribExternalBuffers buffer_desc;
    VASurfaceAttrib attrs[2] = {
        {
            .type  = VASurfaceAttribMemoryType,
            .flags = VA_SURFACE_ATTRIB_SETTABLE,
            .value.type    = VAGenericValueTypeInteger,
            .value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME,
        },
        {
            .type  = VASurfaceAttribExternalBufferDescriptor,
            .flags = VA_SURFACE_ATTRIB_SETTABLE,
            .value.type    = VAGenericValueTypePointer,
            .value.value.p = &buffer_desc,
        }
    };

    desc = (AVDRMFrameDescriptor*)src->data[0];

    if (desc->nb_objects != 1) {
        av_log(dst_fc, AV_LOG_ERROR, "VAAPI can only map frames "
               "made from a single DRM object.\n");
        return AVERROR(EINVAL);
    }

    va_fourcc = 0;
    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_drm_format_map); i++) {
        if (desc->nb_layers != vaapi_drm_format_map[i].nb_layer_formats)
            continue;
        for (j = 0; j < desc->nb_layers; j++) {
            if (desc->layers[j].format !=
                vaapi_drm_format_map[i].layer_formats[j])
                break;
        }
        if (j != desc->nb_layers)
            continue;
        va_fourcc = vaapi_drm_format_map[i].va_fourcc;
        break;
    }
    if (!va_fourcc) {
        av_log(dst_fc, AV_LOG_ERROR, "DRM format not supported "
               "by VAAPI.\n");
        return AVERROR(EINVAL);
    }

    av_log(dst_fc, AV_LOG_DEBUG, "Map DRM object %d to VAAPI as "
           "%08x.\n", desc->objects[0].fd, va_fourcc);

    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_format_map); i++) {
        if (vaapi_format_map[i].fourcc == va_fourcc)
            va_rt_format = vaapi_format_map[i].rt_format;
    }

    buffer_handle = desc->objects[0].fd;
    buffer_desc.pixel_format = va_fourcc;
    buffer_desc.width        = src_fc->width;
    buffer_desc.height       = src_fc->height;
    buffer_desc.data_size    = desc->objects[0].size;
    buffer_desc.buffers      = &buffer_handle;
    buffer_desc.num_buffers  = 1;
    buffer_desc.flags        = 0;

    k = 0;
    for (i = 0; i < desc->nb_layers; i++) {
        for (j = 0; j < desc->layers[i].nb_planes; j++) {
            buffer_desc.pitches[k] = desc->layers[i].planes[j].pitch;
            buffer_desc.offsets[k] = desc->layers[i].planes[j].offset;
            ++k;
        }
    }
    buffer_desc.num_planes = k;

    vas = vaCreateSurfaces(dst_dev->display, va_rt_format,
                           src->width, src->height,
                           &surface_id, 1,
                           attrs, FF_ARRAY_ELEMS(attrs));
    if (vas != VA_STATUS_SUCCESS) {
        av_log(dst_fc, AV_LOG_ERROR, "Failed to create surface from DRM "
               "object: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    av_log(dst_fc, AV_LOG_DEBUG, "Create surface %#x.\n", surface_id);

    err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
                                &vaapi_unmap_from_drm,
                                (void*)(uintptr_t)surface_id);
    if (err < 0)
        return err;

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[3] = (uint8_t*)(uintptr_t)surface_id;

    av_log(dst_fc, AV_LOG_DEBUG, "Mapped DRM object %d to "
           "surface %#x.\n", desc->objects[0].fd, surface_id);

    return 0;
}

static void vaapi_unmap_to_drm(AVHWFramesContext *dst_fc,
                               HWMapDescriptor *hwmap)
{
    AVDRMFrameDescriptor *drm_desc = hwmap->priv;
    int i;

    for (i = 0; i < drm_desc->nb_objects; i++)
        close(drm_desc->objects[i].fd);

    av_freep(&drm_desc);
}

static int vaapi_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                            const AVFrame *src, int flags)
{
#if VA_CHECK_VERSION(1, 1, 0)
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VASurfaceID surface_id;
    VAStatus vas;
    VADRMPRIMESurfaceDescriptor va_desc;
    AVDRMFrameDescriptor *drm_desc = NULL;
    int err, i, j;

    surface_id = (VASurfaceID)(uintptr_t)src->data[3];

    vas = vaExportSurfaceHandle(hwctx->display, surface_id,
                                VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
                                VA_EXPORT_SURFACE_READ_ONLY |
                                VA_EXPORT_SURFACE_SEPARATE_LAYERS,
                                &va_desc);
    if (vas != VA_STATUS_SUCCESS) {
        if (vas == VA_STATUS_ERROR_UNIMPLEMENTED)
            return AVERROR(ENOSYS);
        av_log(hwfc, AV_LOG_ERROR, "Failed to export surface %#x: "
               "%d (%s).\n", surface_id, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    drm_desc = av_mallocz(sizeof(*drm_desc));
    if (!drm_desc) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    // By some bizarre coincidence, these structures are very similar...
    drm_desc->nb_objects = va_desc.num_objects;
    for (i = 0; i < va_desc.num_objects; i++) {
        drm_desc->objects[i].fd   = va_desc.objects[i].fd;
        drm_desc->objects[i].size = va_desc.objects[i].size;
        drm_desc->objects[i].format_modifier =
            va_desc.objects[i].drm_format_modifier;
    }
    drm_desc->nb_layers = va_desc.num_layers;
    for (i = 0; i < va_desc.num_layers; i++) {
        drm_desc->layers[i].format    = va_desc.layers[i].drm_format;
        drm_desc->layers[i].nb_planes = va_desc.layers[i].num_planes;
        for (j = 0; j < va_desc.layers[i].num_planes; j++) {
            drm_desc->layers[i].planes[j].object_index =
                va_desc.layers[i].object_index[j];
            drm_desc->layers[i].planes[j].offset =
                va_desc.layers[i].offset[j];
            drm_desc->layers[i].planes[j].pitch =
                va_desc.layers[i].pitch[j];
        }
    }

    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
                                &vaapi_unmap_to_drm, drm_desc);
    if (err < 0)
        goto fail;

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[0] = (uint8_t*)drm_desc;

    return 0;

fail:
    for (i = 0; i < va_desc.num_objects; i++)
        close(va_desc.objects[i].fd);
    av_freep(&drm_desc);
    return err;
#else
    // Older versions without vaExportSurfaceHandle() are not supported -
    // in theory this is possible with a combination of vaDeriveImage()
    // and vaAcquireBufferHandle(), but it doesn't carry enough metadata
    // to actually use the result in a generic way.
    return AVERROR(ENOSYS);
#endif
}

#endif

static int vaapi_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
                        const AVFrame *src, int flags)
{
    switch (src->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        return vaapi_map_from_drm(hwfc, dst, src, flags);
#endif
    default:
        return AVERROR(ENOSYS);
    }
}

static int vaapi_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
                          const AVFrame *src, int flags)
{
    switch (dst->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        return vaapi_map_to_drm(hwfc, dst, src, flags);
#endif
    default:
        return vaapi_map_to_memory(hwfc, dst, src, flags);
    }
}

static void vaapi_device_free(AVHWDeviceContext *ctx)
{
    AVVAAPIDeviceContext *hwctx = ctx->hwctx;
    VAAPIDevicePriv *priv = ctx->user_opaque;

    if (hwctx->display)
        vaTerminate(hwctx->display);

#if HAVE_VAAPI_X11
    if (priv->x11_display)
        XCloseDisplay(priv->x11_display);
#endif

    if (priv->drm_fd >= 0)
        close(priv->drm_fd);

    av_freep(&priv);
}

#if CONFIG_VAAPI_1
static void vaapi_device_log_error(void *context, const char *message)
{
    AVHWDeviceContext *ctx = context;

    av_log(ctx, AV_LOG_ERROR, "libva: %s", message);
}

static void vaapi_device_log_info(void *context, const char *message)
{
    AVHWDeviceContext *ctx = context;

    av_log(ctx, AV_LOG_VERBOSE, "libva: %s", message);
}
#endif

static int vaapi_device_connect(AVHWDeviceContext *ctx,
                                VADisplay display)
{
    AVVAAPIDeviceContext *hwctx = ctx->hwctx;
    int major, minor;
    VAStatus vas;

#if CONFIG_VAAPI_1
    vaSetErrorCallback(display, &vaapi_device_log_error, ctx);
    vaSetInfoCallback (display, &vaapi_device_log_info,  ctx);
#endif

    hwctx->display = display;

    vas = vaInitialize(display, &major, &minor);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI "
               "connection: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    av_log(ctx, AV_LOG_VERBOSE, "Initialised VAAPI connection: "
           "version %d.%d\n", major, minor);

    return 0;
}

static int vaapi_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    VAAPIDevicePriv *priv;
    VADisplay display = NULL;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    priv->drm_fd = -1;

    ctx->user_opaque = priv;
    ctx->free        = vaapi_device_free;

#if HAVE_VAAPI_X11
    if (!display && !(device && device[0] == '/')) {
        // Try to open the device as an X11 display.
        priv->x11_display = XOpenDisplay(device);
        if (!priv->x11_display) {
            av_log(ctx, AV_LOG_VERBOSE, "Cannot open X11 display "
                   "%s.\n", XDisplayName(device));
        } else {
            display = vaGetDisplay(priv->x11_display);
            if (!display) {
                av_log(ctx, AV_LOG_ERROR, "Cannot open a VA display "
                       "from X11 display %s.\n", XDisplayName(device));
                return AVERROR_UNKNOWN;
            }

            av_log(ctx, AV_LOG_VERBOSE, "Opened VA display via "
                   "X11 display %s.\n", XDisplayName(device));
        }
    }
#endif

#if HAVE_VAAPI_DRM
    if (!display) {
        // Try to open the device as a DRM path.
        // Default to using the first render node if the user did not
        // supply a path.
        const char *path = device ? device : "/dev/dri/renderD128";
        priv->drm_fd = open(path, O_RDWR);
        if (priv->drm_fd < 0) {
            av_log(ctx, AV_LOG_VERBOSE, "Cannot open DRM device %s.\n",
                   path);
        } else {
            display = vaGetDisplayDRM(priv->drm_fd);
            if (!display) {
                av_log(ctx, AV_LOG_ERROR, "Cannot open a VA display "
                       "from DRM device %s.\n", path);
                return AVERROR_UNKNOWN;
            }

            av_log(ctx, AV_LOG_VERBOSE, "Opened VA display via "
                   "DRM device %s.\n", path);
        }
    }
#endif

    if (!display) {
        av_log(ctx, AV_LOG_ERROR, "No VA display found for "
               "device: %s.\n", device ? device : "");
        return AVERROR(EINVAL);
    }

    return vaapi_device_connect(ctx, display);
}
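
/*
 * Illustrative sketch (not part of the library): the function above runs
 * when a caller opens a VAAPI device through the generic API, e.g.:
 *
 *     AVBufferRef *device_ref = NULL;
 *     int err = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VAAPI,
 *                                      "/dev/dri/renderD128", NULL, 0);
 *
 * The path here is only an example; any X11 display name or DRM render node
 * accepted by the probing above will work, and passing NULL tries X11 first
 * (when built with X11 support) before falling back to the default render
 * node.
 */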

static int vaapi_device_derive(AVHWDeviceContext *ctx,
                               AVHWDeviceContext *src_ctx, int flags)
{
#if CONFIG_LIBDRM
    if (src_ctx->type == AV_HWDEVICE_TYPE_DRM) {
        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;
        VADisplay *display;
        VAAPIDevicePriv *priv;

        if (src_hwctx->fd < 0) {
            av_log(ctx, AV_LOG_ERROR, "DRM instance requires an associated "
                   "device to derive a VA display from.\n");
            return AVERROR(EINVAL);
        }

        priv = av_mallocz(sizeof(*priv));
        if (!priv)
            return AVERROR(ENOMEM);

        // Inherits the fd from the source context, which will close it.
        priv->drm_fd = -1;

        ctx->user_opaque = priv;
        ctx->free        = &vaapi_device_free;

        display = vaGetDisplayDRM(src_hwctx->fd);
        if (!display) {
            av_log(ctx, AV_LOG_ERROR, "Failed to open a VA display from "
                   "DRM device.\n");
            return AVERROR(EIO);
        }

        return vaapi_device_connect(ctx, display);
    }
#endif
    return AVERROR(ENOSYS);
}

const HWContextType ff_hwcontext_type_vaapi = {
    .type                   = AV_HWDEVICE_TYPE_VAAPI,
    .name                   = "VAAPI",

    .device_hwctx_size      = sizeof(AVVAAPIDeviceContext),
    .device_priv_size       = sizeof(VAAPIDeviceContext),
    .device_hwconfig_size   = sizeof(AVVAAPIHWConfig),
    .frames_hwctx_size      = sizeof(AVVAAPIFramesContext),
    .frames_priv_size       = sizeof(VAAPIFramesContext),

    .device_create          = &vaapi_device_create,
    .device_derive          = &vaapi_device_derive,
    .device_init            = &vaapi_device_init,
    .device_uninit          = &vaapi_device_uninit,
    .frames_get_constraints = &vaapi_frames_get_constraints,
    .frames_init            = &vaapi_frames_init,
    .frames_uninit          = &vaapi_frames_uninit,
    .frames_get_buffer      = &vaapi_get_buffer,

    .transfer_get_formats   = &vaapi_transfer_get_formats,
    .transfer_data_to       = &vaapi_transfer_data_to,
    .transfer_data_from     = &vaapi_transfer_data_from,

    .map_to                 = &vaapi_map_to,
    .map_from               = &vaapi_map_from,

    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE
    },
};