/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#if HAVE_VAAPI_X11
# include <va/va_x11.h>
#endif
#if HAVE_VAAPI_DRM
# include <va/va_drm.h>
#endif
#if CONFIG_LIBDRM
# include <va/va_drmcommon.h>
# include <drm_fourcc.h>
#endif

#include <fcntl.h>
#if HAVE_UNISTD_H
# include <unistd.h>
#endif

#include "avassert.h"
#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_drm.h"
#include "hwcontext_internal.h"
#include "hwcontext_vaapi.h"
#include "mem.h"
#include "pixdesc.h"
#include "pixfmt.h"

typedef struct VAAPIDevicePriv {
#if HAVE_VAAPI_X11
    Display *x11_display;
#endif
    int drm_fd;
} VAAPIDevicePriv;

typedef struct VAAPISurfaceFormat {
    enum AVPixelFormat pix_fmt;
    VAImageFormat image_format;
} VAAPISurfaceFormat;

typedef struct VAAPIDeviceContext {
    // Surface formats which can be used with this device.
    VAAPISurfaceFormat *formats;
    int nb_formats;
} VAAPIDeviceContext;

typedef struct VAAPIFramesContext {
    // Surface attributes set at create time.
    VASurfaceAttrib *attributes;
    int nb_attributes;
    // RT format of the underlying surface (Intel driver ignores this anyway).
    unsigned int rt_format;
    // Whether vaDeriveImage works.
    int derive_works;
} VAAPIFramesContext;

typedef struct VAAPIMapping {
    // Handle to the derived or copied image which is mapped.
    VAImage image;
    // The mapping flags actually used.
    int flags;
} VAAPIMapping;

#define MAP(va, rt, av) { \
        VA_FOURCC_ ## va, \
        VA_RT_FORMAT_ ## rt, \
        AV_PIX_FMT_ ## av \
    }
// The map fourcc <-> pix_fmt isn't bijective because of the annoying U/V
// plane swap cases. The frame handling below tries to hide these.
static const struct {
    unsigned int fourcc;
    unsigned int rt_format;
    enum AVPixelFormat pix_fmt;
} vaapi_format_map[] = {
    MAP(NV12, YUV420, NV12),
    MAP(YV12, YUV420, YUV420P), // With U/V planes swapped.
    MAP(IYUV, YUV420, YUV420P),
#ifdef VA_FOURCC_I420
    MAP(I420, YUV420, YUV420P),
#endif
#ifdef VA_FOURCC_YV16
    MAP(YV16, YUV422, YUV422P), // With U/V planes swapped.
#endif
    MAP(422H, YUV422, YUV422P),
    MAP(UYVY, YUV422, UYVY422),
    MAP(YUY2, YUV422, YUYV422),
    MAP(411P, YUV411, YUV411P),
    MAP(422V, YUV422, YUV440P),
    MAP(444P, YUV444, YUV444P),
    MAP(Y800, YUV400, GRAY8),
#ifdef VA_FOURCC_P010
    MAP(P010, YUV420_10BPP, P010),
#endif
    MAP(BGRA, RGB32, BGRA),
    MAP(BGRX, RGB32, BGR0),
    MAP(RGBA, RGB32, RGBA),
    MAP(RGBX, RGB32, RGB0),
#ifdef VA_FOURCC_ABGR
    MAP(ABGR, RGB32, ABGR),
    MAP(XBGR, RGB32, 0BGR),
#endif
    MAP(ARGB, RGB32, ARGB),
    MAP(XRGB, RGB32, 0RGB),
};
#undef MAP

static enum AVPixelFormat vaapi_pix_fmt_from_fourcc(unsigned int fourcc)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_format_map); i++)
        if (vaapi_format_map[i].fourcc == fourcc)
            return vaapi_format_map[i].pix_fmt;
    return AV_PIX_FMT_NONE;
}

static int vaapi_get_image_format(AVHWDeviceContext *hwdev,
                                  enum AVPixelFormat pix_fmt,
                                  VAImageFormat **image_format)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    int i;
    for (i = 0; i < ctx->nb_formats; i++) {
        if (ctx->formats[i].pix_fmt == pix_fmt) {
            if (image_format)
                *image_format = &ctx->formats[i].image_format;
            return 0;
        }
    }
    return AVERROR(EINVAL);
}
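
// Fill the size limits and the set of usable software formats for this
// device, narrowing by the supplied configuration via
// vaQuerySurfaceAttributes() when the driver supports that query.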
static int vaapi_frames_get_constraints(AVHWDeviceContext *hwdev,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    AVVAAPIDeviceContext *hwctx = hwdev->hwctx;
    const AVVAAPIHWConfig *config = hwconfig;
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    VASurfaceAttrib *attr_list = NULL;
    VAStatus vas;
    enum AVPixelFormat pix_fmt;
    unsigned int fourcc;
    int err, i, j, attr_count, pix_fmt_count;
    if (config &&
        !(hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES)) {
        attr_count = 0;
        vas = vaQuerySurfaceAttributes(hwctx->display, config->config_id,
                                       0, &attr_count);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwdev, AV_LOG_ERROR, "Failed to query surface attributes: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(ENOSYS);
            goto fail;
        }
        attr_list = av_malloc(attr_count * sizeof(*attr_list));
        if (!attr_list) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        vas = vaQuerySurfaceAttributes(hwctx->display, config->config_id,
                                       attr_list, &attr_count);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwdev, AV_LOG_ERROR, "Failed to query surface attributes: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(ENOSYS);
            goto fail;
        }
        pix_fmt_count = 0;
        for (i = 0; i < attr_count; i++) {
            switch (attr_list[i].type) {
            case VASurfaceAttribPixelFormat:
                fourcc = attr_list[i].value.value.i;
                pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
                if (pix_fmt != AV_PIX_FMT_NONE) {
                    ++pix_fmt_count;
                } else {
                    // Something unsupported - ignore.
                }
                break;
            case VASurfaceAttribMinWidth:
                constraints->min_width = attr_list[i].value.value.i;
                break;
            case VASurfaceAttribMinHeight:
                constraints->min_height = attr_list[i].value.value.i;
                break;
            case VASurfaceAttribMaxWidth:
                constraints->max_width = attr_list[i].value.value.i;
                break;
            case VASurfaceAttribMaxHeight:
                constraints->max_height = attr_list[i].value.value.i;
                break;
            }
        }
        if (pix_fmt_count == 0) {
            // Nothing usable found. Presumably there exists something which
            // works, so leave the set null to indicate unknown.
            constraints->valid_sw_formats = NULL;
        } else {
            constraints->valid_sw_formats = av_malloc_array(pix_fmt_count + 1,
                                                            sizeof(pix_fmt));
            if (!constraints->valid_sw_formats) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            for (i = j = 0; i < attr_count; i++) {
                if (attr_list[i].type != VASurfaceAttribPixelFormat)
                    continue;
                fourcc = attr_list[i].value.value.i;
                pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
                if (pix_fmt != AV_PIX_FMT_NONE)
                    constraints->valid_sw_formats[j++] = pix_fmt;
            }
            av_assert0(j == pix_fmt_count);
            constraints->valid_sw_formats[j] = AV_PIX_FMT_NONE;
        }
    } else {
        // No configuration supplied.
        // Return the full set of image formats known by the implementation.
        constraints->valid_sw_formats = av_malloc_array(ctx->nb_formats + 1,
                                                        sizeof(pix_fmt));
        if (!constraints->valid_sw_formats) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        for (i = 0; i < ctx->nb_formats; i++)
            constraints->valid_sw_formats[i] = ctx->formats[i].pix_fmt;
        constraints->valid_sw_formats[i] = AV_PIX_FMT_NONE;
    }
    constraints->valid_hw_formats = av_malloc_array(2, sizeof(pix_fmt));
    if (!constraints->valid_hw_formats) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    constraints->valid_hw_formats[0] = AV_PIX_FMT_VAAPI;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
    err = 0;
fail:
    av_freep(&attr_list);
    return err;
}

static const struct {
    const char *friendly_name;
    const char *match_string;
    unsigned int quirks;
} vaapi_driver_quirks_table[] = {
    {
        "Intel i965 (Quick Sync)",
        "i965",
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS,
    },
    {
        "Intel iHD",
        "ubit",
        AV_VAAPI_DRIVER_QUIRK_ATTRIB_MEMTYPE,
    },
    {
        "VDPAU wrapper",
        "Splitted-Desktop Systems VDPAU backend for VA-API",
        AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES,
    },
};

static int vaapi_device_init(AVHWDeviceContext *hwdev)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwdev->hwctx;
    VAImageFormat *image_list = NULL;
    VAStatus vas;
    const char *vendor_string;
    int err, i, image_count;
    enum AVPixelFormat pix_fmt;
    unsigned int fourcc;
    image_count = vaMaxNumImageFormats(hwctx->display);
    if (image_count <= 0) {
        err = AVERROR(EIO);
        goto fail;
    }
    image_list = av_malloc(image_count * sizeof(*image_list));
    if (!image_list) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    vas = vaQueryImageFormats(hwctx->display, image_list, &image_count);
    if (vas != VA_STATUS_SUCCESS) {
        err = AVERROR(EIO);
        goto fail;
    }
    ctx->formats = av_malloc(image_count * sizeof(*ctx->formats));
    if (!ctx->formats) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->nb_formats = 0;
    for (i = 0; i < image_count; i++) {
        fourcc = image_list[i].fourcc;
        pix_fmt = vaapi_pix_fmt_from_fourcc(fourcc);
        if (pix_fmt == AV_PIX_FMT_NONE) {
            av_log(hwdev, AV_LOG_DEBUG, "Format %#x -> unknown.\n",
                   fourcc);
        } else {
            av_log(hwdev, AV_LOG_DEBUG, "Format %#x -> %s.\n",
                   fourcc, av_get_pix_fmt_name(pix_fmt));
            ctx->formats[ctx->nb_formats].pix_fmt = pix_fmt;
            ctx->formats[ctx->nb_formats].image_format = image_list[i];
            ++ctx->nb_formats;
        }
    }
    if (hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_USER_SET) {
        av_log(hwdev, AV_LOG_VERBOSE, "Not detecting driver: "
               "quirks set by user.\n");
    } else {
        // Detect the driver in use and set quirk flags if necessary.
        vendor_string = vaQueryVendorString(hwctx->display);
        hwctx->driver_quirks = 0;
        if (vendor_string) {
            for (i = 0; i < FF_ARRAY_ELEMS(vaapi_driver_quirks_table); i++) {
                if (strstr(vendor_string,
                           vaapi_driver_quirks_table[i].match_string)) {
                    av_log(hwdev, AV_LOG_VERBOSE, "Matched \"%s\" as known "
                           "driver \"%s\".\n", vendor_string,
                           vaapi_driver_quirks_table[i].friendly_name);
                    hwctx->driver_quirks |=
                        vaapi_driver_quirks_table[i].quirks;
                    break;
                }
            }
            if (!(i < FF_ARRAY_ELEMS(vaapi_driver_quirks_table))) {
                av_log(hwdev, AV_LOG_VERBOSE, "Unknown driver \"%s\", "
                       "assuming standard behaviour.\n", vendor_string);
            }
        }
    }
    av_free(image_list);
    return 0;
fail:
    av_freep(&ctx->formats);
    av_free(image_list);
    return err;
}

static void vaapi_device_uninit(AVHWDeviceContext *hwdev)
{
    VAAPIDeviceContext *ctx = hwdev->internal->priv;
    av_freep(&ctx->formats);
}
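
// Buffer free callback: each pool entry wraps a single VASurfaceID, which is
// destroyed here once the last reference to the buffer is released.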
static void vaapi_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext *hwfc = opaque;
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VASurfaceID surface_id;
    VAStatus vas;
    surface_id = (VASurfaceID)(uintptr_t)data;
    vas = vaDestroySurfaces(hwctx->display, &surface_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to destroy surface %#x: "
               "%d (%s).\n", surface_id, vas, vaErrorStr(vas));
    }
}
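
// Pool allocator: creates one surface per buffer using the attributes
// prepared in vaapi_frames_init(). For fixed-size pools the new surface ID
// is also recorded in surface_ids so that vaCreateContext() calls can see it.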
static AVBufferRef *vaapi_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext *hwfc = opaque;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    AVVAAPIFramesContext *avfc = hwfc->hwctx;
    VASurfaceID surface_id;
    VAStatus vas;
    AVBufferRef *ref;
    if (hwfc->initial_pool_size > 0 &&
        avfc->nb_surfaces >= hwfc->initial_pool_size)
        return NULL;
    vas = vaCreateSurfaces(hwctx->display, ctx->rt_format,
                           hwfc->width, hwfc->height,
                           &surface_id, 1,
                           ctx->attributes, ctx->nb_attributes);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to create surface: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        return NULL;
    }
    av_log(hwfc, AV_LOG_DEBUG, "Created surface %#x.\n", surface_id);
    ref = av_buffer_create((uint8_t*)(uintptr_t)surface_id,
                           sizeof(surface_id), &vaapi_buffer_free,
                           hwfc, AV_BUFFER_FLAG_READONLY);
    if (!ref) {
        vaDestroySurfaces(hwctx->display, &surface_id, 1);
        return NULL;
    }
    if (hwfc->initial_pool_size > 0) {
        // This is a fixed-size pool, so we must still be in the initial
        // allocation sequence.
        av_assert0(avfc->nb_surfaces < hwfc->initial_pool_size);
        avfc->surface_ids[avfc->nb_surfaces] = surface_id;
        ++avfc->nb_surfaces;
    }
    return ref;
}

static int vaapi_frames_init(AVHWFramesContext *hwfc)
{
    AVVAAPIFramesContext *avfc = hwfc->hwctx;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VAImageFormat *expected_format;
    AVBufferRef *test_surface = NULL;
    VASurfaceID test_surface_id;
    VAImage test_image;
    VAStatus vas;
    int err, i;
    unsigned int fourcc, rt_format;
    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_format_map); i++) {
        if (vaapi_format_map[i].pix_fmt == hwfc->sw_format) {
            fourcc = vaapi_format_map[i].fourcc;
            rt_format = vaapi_format_map[i].rt_format;
            break;
        }
    }
    if (i >= FF_ARRAY_ELEMS(vaapi_format_map)) {
        av_log(hwfc, AV_LOG_ERROR, "Unsupported format: %s.\n",
               av_get_pix_fmt_name(hwfc->sw_format));
        return AVERROR(EINVAL);
    }
    if (!hwfc->pool) {
        if (!(hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_SURFACE_ATTRIBUTES)) {
            int need_memory_type = !(hwctx->driver_quirks & AV_VAAPI_DRIVER_QUIRK_ATTRIB_MEMTYPE);
            int need_pixel_format = 1;
            for (i = 0; i < avfc->nb_attributes; i++) {
                if (avfc->attributes[i].type == VASurfaceAttribMemoryType)
                    need_memory_type = 0;
                if (avfc->attributes[i].type == VASurfaceAttribPixelFormat)
                    need_pixel_format = 0;
            }
            ctx->nb_attributes =
                avfc->nb_attributes + need_memory_type + need_pixel_format;
            ctx->attributes = av_malloc(ctx->nb_attributes *
                                        sizeof(*ctx->attributes));
            if (!ctx->attributes) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            for (i = 0; i < avfc->nb_attributes; i++)
                ctx->attributes[i] = avfc->attributes[i];
            if (need_memory_type) {
                ctx->attributes[i++] = (VASurfaceAttrib) {
                    .type = VASurfaceAttribMemoryType,
                    .flags = VA_SURFACE_ATTRIB_SETTABLE,
                    .value.type = VAGenericValueTypeInteger,
                    .value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA,
                };
            }
            if (need_pixel_format) {
                ctx->attributes[i++] = (VASurfaceAttrib) {
                    .type = VASurfaceAttribPixelFormat,
                    .flags = VA_SURFACE_ATTRIB_SETTABLE,
                    .value.type = VAGenericValueTypeInteger,
                    .value.value.i = fourcc,
                };
            }
            av_assert0(i == ctx->nb_attributes);
        } else {
            ctx->attributes = NULL;
            ctx->nb_attributes = 0;
        }
        ctx->rt_format = rt_format;
        if (hwfc->initial_pool_size > 0) {
            // This pool will be usable as a render target, so we need to store
            // all of the surface IDs somewhere that vaCreateContext() calls
            // will be able to access them.
            avfc->nb_surfaces = 0;
            avfc->surface_ids = av_malloc(hwfc->initial_pool_size *
                                          sizeof(*avfc->surface_ids));
            if (!avfc->surface_ids) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        } else {
            // This pool allows dynamic sizing, and will not be usable as a
            // render target.
            avfc->nb_surfaces = 0;
            avfc->surface_ids = NULL;
        }
        hwfc->internal->pool_internal =
            av_buffer_pool_init2(sizeof(VASurfaceID), hwfc,
                                 &vaapi_pool_alloc, NULL);
        if (!hwfc->internal->pool_internal) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to create VAAPI surface pool.\n");
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }
    // Allocate a single surface to test whether vaDeriveImage() is going
    // to work for the specific configuration.
    if (hwfc->pool) {
        test_surface = av_buffer_pool_get(hwfc->pool);
        if (!test_surface) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to allocate a surface from "
                   "user-configured buffer pool.\n");
            err = AVERROR(ENOMEM);
            goto fail;
        }
    } else {
        test_surface = av_buffer_pool_get(hwfc->internal->pool_internal);
        if (!test_surface) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to allocate a surface from "
                   "internal buffer pool.\n");
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }
    test_surface_id = (VASurfaceID)(uintptr_t)test_surface->data;
    ctx->derive_works = 0;
    err = vaapi_get_image_format(hwfc->device_ctx,
                                 hwfc->sw_format, &expected_format);
    if (err == 0) {
        vas = vaDeriveImage(hwctx->display, test_surface_id, &test_image);
        if (vas == VA_STATUS_SUCCESS) {
            if (expected_format->fourcc == test_image.format.fourcc) {
                av_log(hwfc, AV_LOG_DEBUG, "Direct mapping possible.\n");
                ctx->derive_works = 1;
            } else {
                av_log(hwfc, AV_LOG_DEBUG, "Direct mapping disabled: "
                       "derived image format %08x does not match "
                       "expected format %08x.\n",
                       expected_format->fourcc, test_image.format.fourcc);
            }
            vaDestroyImage(hwctx->display, test_image.image_id);
        } else {
            av_log(hwfc, AV_LOG_DEBUG, "Direct mapping disabled: "
                   "deriving image does not work: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
        }
    } else {
        av_log(hwfc, AV_LOG_DEBUG, "Direct mapping disabled: "
               "image format is not supported.\n");
    }
    av_buffer_unref(&test_surface);
    return 0;
fail:
    av_buffer_unref(&test_surface);
    av_freep(&avfc->surface_ids);
    av_freep(&ctx->attributes);
    return err;
}

static void vaapi_frames_uninit(AVHWFramesContext *hwfc)
{
    AVVAAPIFramesContext *avfc = hwfc->hwctx;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    av_freep(&avfc->surface_ids);
    av_freep(&ctx->attributes);
}

static int vaapi_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(hwfc->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);
    frame->data[3] = frame->buf[0]->data;
    frame->format = AV_PIX_FMT_VAAPI;
    frame->width = hwfc->width;
    frame->height = hwfc->height;
    return 0;
}
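
// List the software formats usable for upload/download; the frame context's
// own sw_format is placed first as the preferred choice when it is available.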
static int vaapi_transfer_get_formats(AVHWFramesContext *hwfc,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VAAPIDeviceContext *ctx = hwfc->device_ctx->internal->priv;
    enum AVPixelFormat *pix_fmts;
    int i, k, sw_format_available;
    sw_format_available = 0;
    for (i = 0; i < ctx->nb_formats; i++) {
        if (ctx->formats[i].pix_fmt == hwfc->sw_format)
            sw_format_available = 1;
    }
    pix_fmts = av_malloc((ctx->nb_formats + 1) * sizeof(*pix_fmts));
    if (!pix_fmts)
        return AVERROR(ENOMEM);
    if (sw_format_available) {
        pix_fmts[0] = hwfc->sw_format;
        k = 1;
    } else {
        k = 0;
    }
    for (i = 0; i < ctx->nb_formats; i++) {
        if (ctx->formats[i].pix_fmt == hwfc->sw_format)
            continue;
        av_assert0(k < ctx->nb_formats);
        pix_fmts[k++] = ctx->formats[i].pix_fmt;
    }
    pix_fmts[k] = AV_PIX_FMT_NONE;
    *formats = pix_fmts;
    return 0;
}

static void vaapi_unmap_frame(AVHWFramesContext *hwfc,
                              HWMapDescriptor *hwmap)
{
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VAAPIMapping *map = hwmap->priv;
    VASurfaceID surface_id;
    VAStatus vas;
    surface_id = (VASurfaceID)(uintptr_t)hwmap->source->data[3];
    av_log(hwfc, AV_LOG_DEBUG, "Unmap surface %#x.\n", surface_id);
    vas = vaUnmapBuffer(hwctx->display, map->image.buf);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to unmap image from surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
    }
    if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
        !(map->flags & AV_HWFRAME_MAP_DIRECT)) {
        vas = vaPutImage(hwctx->display, surface_id, map->image.image_id,
                         0, 0, hwfc->width, hwfc->height,
                         0, 0, hwfc->width, hwfc->height);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to write image to surface "
                   "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
        }
    }
    vas = vaDestroyImage(hwctx->display, map->image.image_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to destroy image from surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
    }
    av_free(map);
}

static int vaapi_map_frame(AVHWFramesContext *hwfc,
                           AVFrame *dst, const AVFrame *src, int flags)
{
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VAAPIFramesContext *ctx = hwfc->internal->priv;
    VASurfaceID surface_id;
    VAImageFormat *image_format;
    VAAPIMapping *map;
    VAStatus vas;
    void *address = NULL;
    int err, i;
    surface_id = (VASurfaceID)(uintptr_t)src->data[3];
    av_log(hwfc, AV_LOG_DEBUG, "Map surface %#x.\n", surface_id);
    if (!ctx->derive_works && (flags & AV_HWFRAME_MAP_DIRECT)) {
        // Requested direct mapping but it is not possible.
        return AVERROR(EINVAL);
    }
    if (dst->format == AV_PIX_FMT_NONE)
        dst->format = hwfc->sw_format;
    if (dst->format != hwfc->sw_format && (flags & AV_HWFRAME_MAP_DIRECT)) {
        // Requested direct mapping but the formats do not match.
        return AVERROR(EINVAL);
    }
    err = vaapi_get_image_format(hwfc->device_ctx, dst->format, &image_format);
    if (err < 0) {
        // Requested format is not a valid output format.
        return AVERROR(EINVAL);
    }
    map = av_malloc(sizeof(*map));
    if (!map)
        return AVERROR(ENOMEM);
    map->flags = flags;
    map->image.image_id = VA_INVALID_ID;
    vas = vaSyncSurface(hwctx->display, surface_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to sync surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }
    // The memory which we map using derive need not be connected to the CPU
    // in a way conducive to fast access. On Gen7-Gen9 Intel graphics, the
    // memory is mappable but not cached, so normal memcpy()-like access is
    // very slow to read it (but writing is ok). It is possible to read much
    // faster with a copy routine which is aware of the limitation, but we
    // assume for now that the user is not aware of that and would therefore
    // prefer not to be given direct-mapped memory if they request read access.
    if (ctx->derive_works && dst->format == hwfc->sw_format &&
        ((flags & AV_HWFRAME_MAP_DIRECT) || !(flags & AV_HWFRAME_MAP_READ))) {
        vas = vaDeriveImage(hwctx->display, surface_id, &map->image);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to derive image from "
                   "surface %#x: %d (%s).\n",
                   surface_id, vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }
        if (map->image.format.fourcc != image_format->fourcc) {
            av_log(hwfc, AV_LOG_ERROR, "Derive image of surface %#x "
                   "is in wrong format: expected %#08x, got %#08x.\n",
                   surface_id, image_format->fourcc, map->image.format.fourcc);
            err = AVERROR(EIO);
            goto fail;
        }
        map->flags |= AV_HWFRAME_MAP_DIRECT;
    } else {
        vas = vaCreateImage(hwctx->display, image_format,
                            hwfc->width, hwfc->height, &map->image);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to create image for "
                   "surface %#x: %d (%s).\n",
                   surface_id, vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }
        if (!(flags & AV_HWFRAME_MAP_OVERWRITE)) {
            vas = vaGetImage(hwctx->display, surface_id, 0, 0,
                             hwfc->width, hwfc->height, map->image.image_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Failed to read image from "
                       "surface %#x: %d (%s).\n",
                       surface_id, vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
        }
    }
    vas = vaMapBuffer(hwctx->display, map->image.buf, &address);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to map image from surface "
               "%#x: %d (%s).\n", surface_id, vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }
    err = ff_hwframe_map_create(src->hw_frames_ctx,
                                dst, src, &vaapi_unmap_frame, map);
    if (err < 0)
        goto fail;
    dst->width = src->width;
    dst->height = src->height;
    for (i = 0; i < map->image.num_planes; i++) {
        dst->data[i] = (uint8_t*)address + map->image.offsets[i];
        dst->linesize[i] = map->image.pitches[i];
    }
    if (
#ifdef VA_FOURCC_YV16
        map->image.format.fourcc == VA_FOURCC_YV16 ||
#endif
        map->image.format.fourcc == VA_FOURCC_YV12) {
        // Chroma planes are YVU rather than YUV, so swap them.
        FFSWAP(uint8_t*, dst->data[1], dst->data[2]);
    }
    return 0;
fail:
    if (map) {
        if (address)
            vaUnmapBuffer(hwctx->display, map->image.buf);
        if (map->image.image_id != VA_INVALID_ID)
            vaDestroyImage(hwctx->display, map->image.image_id);
        av_free(map);
    }
    return err;
}

static int vaapi_transfer_data_from(AVHWFramesContext *hwfc,
                                    AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;
    if (dst->width > hwfc->width || dst->height > hwfc->height)
        return AVERROR(EINVAL);
    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = dst->format;
    err = vaapi_map_frame(hwfc, map, src, AV_HWFRAME_MAP_READ);
    if (err)
        goto fail;
    map->width = dst->width;
    map->height = dst->height;
    err = av_frame_copy(dst, map);
    if (err)
        goto fail;
    err = 0;
fail:
    av_frame_free(&map);
    return err;
}

static int vaapi_transfer_data_to(AVHWFramesContext *hwfc,
                                  AVFrame *dst, const AVFrame *src)
{
    AVFrame *map;
    int err;
    if (src->width > hwfc->width || src->height > hwfc->height)
        return AVERROR(EINVAL);
    map = av_frame_alloc();
    if (!map)
        return AVERROR(ENOMEM);
    map->format = src->format;
    err = vaapi_map_frame(hwfc, map, dst, AV_HWFRAME_MAP_WRITE | AV_HWFRAME_MAP_OVERWRITE);
    if (err)
        goto fail;
    map->width = src->width;
    map->height = src->height;
    err = av_frame_copy(map, src);
    if (err)
        goto fail;
    err = 0;
fail:
    av_frame_free(&map);
    return err;
}

static int vaapi_map_to_memory(AVHWFramesContext *hwfc, AVFrame *dst,
                               const AVFrame *src, int flags)
{
    int err;
    if (dst->format != AV_PIX_FMT_NONE) {
        err = vaapi_get_image_format(hwfc->device_ctx, dst->format, NULL);
        if (err < 0)
            return AVERROR(ENOSYS);
    }
    err = vaapi_map_frame(hwfc, dst, src, flags);
    if (err)
        return err;
    err = av_frame_copy_props(dst, src);
    if (err)
        return err;
    return 0;
}

#if CONFIG_LIBDRM

#define DRM_MAP(va, layers, ...) { \
        VA_FOURCC_ ## va, \
        layers, \
        { __VA_ARGS__ } \
    }
static const struct {
    uint32_t va_fourcc;
    int nb_layer_formats;
    uint32_t layer_formats[AV_DRM_MAX_PLANES];
} vaapi_drm_format_map[] = {
#ifdef DRM_FORMAT_R8
    DRM_MAP(NV12, 2, DRM_FORMAT_R8, DRM_FORMAT_RG88),
#endif
    DRM_MAP(NV12, 1, DRM_FORMAT_NV12),
#if defined(VA_FOURCC_P010) && defined(DRM_FORMAT_R16)
    DRM_MAP(P010, 2, DRM_FORMAT_R16, DRM_FORMAT_RG1616),
#endif
    DRM_MAP(BGRA, 1, DRM_FORMAT_ARGB8888),
    DRM_MAP(BGRX, 1, DRM_FORMAT_XRGB8888),
    DRM_MAP(RGBA, 1, DRM_FORMAT_ABGR8888),
    DRM_MAP(RGBX, 1, DRM_FORMAT_XBGR8888),
#ifdef VA_FOURCC_ABGR
    DRM_MAP(ABGR, 1, DRM_FORMAT_RGBA8888),
    DRM_MAP(XBGR, 1, DRM_FORMAT_RGBX8888),
#endif
    DRM_MAP(ARGB, 1, DRM_FORMAT_BGRA8888),
    DRM_MAP(XRGB, 1, DRM_FORMAT_BGRX8888),
};
#undef DRM_MAP

static void vaapi_unmap_from_drm(AVHWFramesContext *dst_fc,
                                 HWMapDescriptor *hwmap)
{
    AVVAAPIDeviceContext *dst_dev = dst_fc->device_ctx->hwctx;
    VASurfaceID surface_id = (VASurfaceID)(uintptr_t)hwmap->priv;
    av_log(dst_fc, AV_LOG_DEBUG, "Destroy surface %#x.\n", surface_id);
    vaDestroySurfaces(dst_dev->display, &surface_id, 1);
}
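
// Import a DRM PRIME frame as a VA surface by passing the single DRM object
// through a VASurfaceAttribExternalBuffers descriptor to vaCreateSurfaces().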
static int vaapi_map_from_drm(AVHWFramesContext *src_fc, AVFrame *dst,
                              const AVFrame *src, int flags)
{
    AVHWFramesContext *dst_fc =
        (AVHWFramesContext*)dst->hw_frames_ctx->data;
    AVVAAPIDeviceContext *dst_dev = dst_fc->device_ctx->hwctx;
    const AVDRMFrameDescriptor *desc;
    VASurfaceID surface_id;
    VAStatus vas;
    uint32_t va_fourcc, va_rt_format;
    int err, i, j, k;
    unsigned long buffer_handle;
    VASurfaceAttribExternalBuffers buffer_desc;
    VASurfaceAttrib attrs[2] = {
        {
            .type = VASurfaceAttribMemoryType,
            .flags = VA_SURFACE_ATTRIB_SETTABLE,
            .value.type = VAGenericValueTypeInteger,
            .value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME,
        },
        {
            .type = VASurfaceAttribExternalBufferDescriptor,
            .flags = VA_SURFACE_ATTRIB_SETTABLE,
            .value.type = VAGenericValueTypePointer,
            .value.value.p = &buffer_desc,
        }
    };
    desc = (AVDRMFrameDescriptor*)src->data[0];
    if (desc->nb_objects != 1) {
        av_log(dst_fc, AV_LOG_ERROR, "VAAPI can only map frames "
               "made from a single DRM object.\n");
        return AVERROR(EINVAL);
    }
    va_fourcc = 0;
    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_drm_format_map); i++) {
        if (desc->nb_layers != vaapi_drm_format_map[i].nb_layer_formats)
            continue;
        for (j = 0; j < desc->nb_layers; j++) {
            if (desc->layers[j].format !=
                vaapi_drm_format_map[i].layer_formats[j])
                break;
        }
        if (j != desc->nb_layers)
            continue;
        va_fourcc = vaapi_drm_format_map[i].va_fourcc;
        break;
    }
    if (!va_fourcc) {
        av_log(dst_fc, AV_LOG_ERROR, "DRM format not supported "
               "by VAAPI.\n");
        return AVERROR(EINVAL);
    }
    av_log(dst_fc, AV_LOG_DEBUG, "Map DRM object %d to VAAPI as "
           "%08x.\n", desc->objects[0].fd, va_fourcc);
    for (i = 0; i < FF_ARRAY_ELEMS(vaapi_format_map); i++) {
        if (vaapi_format_map[i].fourcc == va_fourcc)
            va_rt_format = vaapi_format_map[i].rt_format;
    }
    buffer_handle = desc->objects[0].fd;
    buffer_desc.pixel_format = va_fourcc;
    buffer_desc.width = src_fc->width;
    buffer_desc.height = src_fc->height;
    buffer_desc.data_size = desc->objects[0].size;
    buffer_desc.buffers = &buffer_handle;
    buffer_desc.num_buffers = 1;
    buffer_desc.flags = 0;
    k = 0;
    for (i = 0; i < desc->nb_layers; i++) {
        for (j = 0; j < desc->layers[i].nb_planes; j++) {
            buffer_desc.pitches[k] = desc->layers[i].planes[j].pitch;
            buffer_desc.offsets[k] = desc->layers[i].planes[j].offset;
            ++k;
        }
    }
    buffer_desc.num_planes = k;
    vas = vaCreateSurfaces(dst_dev->display, va_rt_format,
                           src->width, src->height,
                           &surface_id, 1,
                           attrs, FF_ARRAY_ELEMS(attrs));
    if (vas != VA_STATUS_SUCCESS) {
        av_log(dst_fc, AV_LOG_ERROR, "Failed to create surface from DRM "
               "object: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    av_log(dst_fc, AV_LOG_DEBUG, "Create surface %#x.\n", surface_id);
    err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
                                &vaapi_unmap_from_drm,
                                (void*)(uintptr_t)surface_id);
    if (err < 0)
        return err;
    dst->width = src->width;
    dst->height = src->height;
    dst->data[3] = (uint8_t*)(uintptr_t)surface_id;
    av_log(dst_fc, AV_LOG_DEBUG, "Mapped DRM object %d to "
           "surface %#x.\n", desc->objects[0].fd, surface_id);
    return 0;
}

static void vaapi_unmap_to_drm(AVHWFramesContext *dst_fc,
                               HWMapDescriptor *hwmap)
{
    AVDRMFrameDescriptor *drm_desc = hwmap->priv;
    int i;
    for (i = 0; i < drm_desc->nb_objects; i++)
        close(drm_desc->objects[i].fd);
    av_freep(&drm_desc);
}
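
// Export a VA surface as a DRM PRIME frame descriptor. This relies on
// vaExportSurfaceHandle(), available only from VA-API 1.1
// (VA_CHECK_VERSION(1, 1, 0)) onwards.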
static int vaapi_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                            const AVFrame *src, int flags)
{
#if VA_CHECK_VERSION(1, 1, 0)
    AVVAAPIDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VASurfaceID surface_id;
    VAStatus vas;
    VADRMPRIMESurfaceDescriptor va_desc;
    AVDRMFrameDescriptor *drm_desc = NULL;
    int err, i, j;
    surface_id = (VASurfaceID)(uintptr_t)src->data[3];
    vas = vaExportSurfaceHandle(hwctx->display, surface_id,
                                VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
                                VA_EXPORT_SURFACE_READ_ONLY |
                                VA_EXPORT_SURFACE_SEPARATE_LAYERS,
                                &va_desc);
    if (vas != VA_STATUS_SUCCESS) {
        if (vas == VA_STATUS_ERROR_UNIMPLEMENTED)
            return AVERROR(ENOSYS);
        av_log(hwfc, AV_LOG_ERROR, "Failed to export surface %#x: "
               "%d (%s).\n", surface_id, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    drm_desc = av_mallocz(sizeof(*drm_desc));
    if (!drm_desc) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    // By some bizarre coincidence, these structures are very similar...
    drm_desc->nb_objects = va_desc.num_objects;
    for (i = 0; i < va_desc.num_objects; i++) {
        drm_desc->objects[i].fd = va_desc.objects[i].fd;
        drm_desc->objects[i].size = va_desc.objects[i].size;
        drm_desc->objects[i].format_modifier =
            va_desc.objects[i].drm_format_modifier;
    }
    drm_desc->nb_layers = va_desc.num_layers;
    for (i = 0; i < va_desc.num_layers; i++) {
        drm_desc->layers[i].format = va_desc.layers[i].drm_format;
        drm_desc->layers[i].nb_planes = va_desc.layers[i].num_planes;
        for (j = 0; j < va_desc.layers[i].num_planes; j++) {
            drm_desc->layers[i].planes[j].object_index =
                va_desc.layers[i].object_index[j];
            drm_desc->layers[i].planes[j].offset =
                va_desc.layers[i].offset[j];
            drm_desc->layers[i].planes[j].pitch =
                va_desc.layers[i].pitch[j];
        }
    }
    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
                                &vaapi_unmap_to_drm, drm_desc);
    if (err < 0)
        goto fail;
    dst->width = src->width;
    dst->height = src->height;
    dst->data[0] = (uint8_t*)drm_desc;
    return 0;
fail:
    for (i = 0; i < va_desc.num_objects; i++)
        close(va_desc.objects[i].fd);
    av_freep(&drm_desc);
    return err;
#else
    // Older versions without vaExportSurfaceHandle() are not supported -
    // in theory this is possible with a combination of vaDeriveImage()
    // and vaAcquireBufferHandle(), but it doesn't carry enough metadata
    // to actually use the result in a generic way.
    return AVERROR(ENOSYS);
#endif
}

#endif

static int vaapi_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
                        const AVFrame *src, int flags)
{
    switch (src->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        return vaapi_map_from_drm(hwfc, dst, src, flags);
#endif
    default:
        return AVERROR(ENOSYS);
    }
}
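
/*
 * Dispatch for mapping a VAAPI frame to another format, reached through
 * av_hwframe_map(). A minimal caller-side sketch for mapping to system
 * memory (error checks omitted; "vaapi_frame" stands for any decoded
 * AV_PIX_FMT_VAAPI frame):
 *
 *     AVFrame *cpu = av_frame_alloc();
 *     av_hwframe_map(cpu, vaapi_frame, AV_HWFRAME_MAP_READ);
 *     // ... read cpu->data[] / cpu->linesize[] ...
 *     av_frame_free(&cpu);   // unmapping happens via vaapi_unmap_frame()
 */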
static int vaapi_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
                          const AVFrame *src, int flags)
{
    switch (dst->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        return vaapi_map_to_drm(hwfc, dst, src, flags);
#endif
    default:
        return vaapi_map_to_memory(hwfc, dst, src, flags);
    }
}

static void vaapi_device_free(AVHWDeviceContext *ctx)
{
    AVVAAPIDeviceContext *hwctx = ctx->hwctx;
    VAAPIDevicePriv *priv = ctx->user_opaque;
    if (hwctx->display)
        vaTerminate(hwctx->display);
#if HAVE_VAAPI_X11
    if (priv->x11_display)
        XCloseDisplay(priv->x11_display);
#endif
    if (priv->drm_fd >= 0)
        close(priv->drm_fd);
    av_freep(&priv);
}

#if CONFIG_VAAPI_1
static void vaapi_device_log_error(void *context, const char *message)
{
    AVHWDeviceContext *ctx = context;
    av_log(ctx, AV_LOG_ERROR, "libva: %s", message);
}

static void vaapi_device_log_info(void *context, const char *message)
{
    AVHWDeviceContext *ctx = context;
    av_log(ctx, AV_LOG_VERBOSE, "libva: %s", message);
}
#endif

static int vaapi_device_connect(AVHWDeviceContext *ctx,
                                VADisplay display)
{
    AVVAAPIDeviceContext *hwctx = ctx->hwctx;
    int major, minor;
    VAStatus vas;
#if CONFIG_VAAPI_1
    vaSetErrorCallback(display, &vaapi_device_log_error, ctx);
    vaSetInfoCallback (display, &vaapi_device_log_info, ctx);
#endif
    hwctx->display = display;
    vas = vaInitialize(display, &major, &minor);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI "
               "connection: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
    av_log(ctx, AV_LOG_VERBOSE, "Initialised VAAPI connection: "
           "version %d.%d\n", major, minor);
    return 0;
}
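
/*
 * Entry point used by av_hwdevice_ctx_create() for AV_HWDEVICE_TYPE_VAAPI.
 * A minimal caller-side sketch (error handling omitted; the device string is
 * optional and may also name an X11 display):
 *
 *     AVBufferRef *device_ref = NULL;
 *     av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VAAPI,
 *                            "/dev/dri/renderD128", NULL, 0);
 */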
static int vaapi_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    VAAPIDevicePriv *priv;
    VADisplay display = NULL;
    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);
    priv->drm_fd = -1;
    ctx->user_opaque = priv;
    ctx->free = vaapi_device_free;
#if HAVE_VAAPI_X11
    if (!display && !(device && device[0] == '/')) {
        // Try to open the device as an X11 display.
        priv->x11_display = XOpenDisplay(device);
        if (!priv->x11_display) {
            av_log(ctx, AV_LOG_VERBOSE, "Cannot open X11 display "
                   "%s.\n", XDisplayName(device));
        } else {
            display = vaGetDisplay(priv->x11_display);
            if (!display) {
                av_log(ctx, AV_LOG_ERROR, "Cannot open a VA display "
                       "from X11 display %s.\n", XDisplayName(device));
                return AVERROR_UNKNOWN;
            }
            av_log(ctx, AV_LOG_VERBOSE, "Opened VA display via "
                   "X11 display %s.\n", XDisplayName(device));
        }
    }
#endif
#if HAVE_VAAPI_DRM
    if (!display) {
        // Try to open the device as a DRM path.
        // Default to using the first render node if the user did not
        // supply a path.
        const char *path = device ? device : "/dev/dri/renderD128";
        priv->drm_fd = open(path, O_RDWR);
        if (priv->drm_fd < 0) {
            av_log(ctx, AV_LOG_VERBOSE, "Cannot open DRM device %s.\n",
                   path);
        } else {
            display = vaGetDisplayDRM(priv->drm_fd);
            if (!display) {
                av_log(ctx, AV_LOG_ERROR, "Cannot open a VA display "
                       "from DRM device %s.\n", path);
                return AVERROR_UNKNOWN;
            }
            av_log(ctx, AV_LOG_VERBOSE, "Opened VA display via "
                   "DRM device %s.\n", path);
        }
    }
#endif
    if (!display) {
        av_log(ctx, AV_LOG_ERROR, "No VA display found for "
               "device: %s.\n", device ? device : "");
        return AVERROR(EINVAL);
    }
    return vaapi_device_connect(ctx, display);
}

static int vaapi_device_derive(AVHWDeviceContext *ctx,
                               AVHWDeviceContext *src_ctx, int flags)
{
#if HAVE_VAAPI_DRM
    if (src_ctx->type == AV_HWDEVICE_TYPE_DRM) {
        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;
        VADisplay *display;
        VAAPIDevicePriv *priv;
        if (src_hwctx->fd < 0) {
            av_log(ctx, AV_LOG_ERROR, "DRM instance requires an associated "
                   "device to derive a VA display from.\n");
            return AVERROR(EINVAL);
        }
        priv = av_mallocz(sizeof(*priv));
        if (!priv)
            return AVERROR(ENOMEM);
        // Inherits the fd from the source context, which will close it.
        priv->drm_fd = -1;
        ctx->user_opaque = priv;
        ctx->free = &vaapi_device_free;
        display = vaGetDisplayDRM(src_hwctx->fd);
        if (!display) {
            av_log(ctx, AV_LOG_ERROR, "Failed to open a VA display from "
                   "DRM device.\n");
            return AVERROR(EIO);
        }
        return vaapi_device_connect(ctx, display);
    }
#endif
    return AVERROR(ENOSYS);
}

const HWContextType ff_hwcontext_type_vaapi = {
    .type                   = AV_HWDEVICE_TYPE_VAAPI,
    .name                   = "VAAPI",

    .device_hwctx_size      = sizeof(AVVAAPIDeviceContext),
    .device_priv_size       = sizeof(VAAPIDeviceContext),
    .device_hwconfig_size   = sizeof(AVVAAPIHWConfig),
    .frames_hwctx_size      = sizeof(AVVAAPIFramesContext),
    .frames_priv_size       = sizeof(VAAPIFramesContext),

    .device_create          = &vaapi_device_create,
    .device_derive          = &vaapi_device_derive,
    .device_init            = &vaapi_device_init,
    .device_uninit          = &vaapi_device_uninit,
    .frames_get_constraints = &vaapi_frames_get_constraints,
    .frames_init            = &vaapi_frames_init,
    .frames_uninit          = &vaapi_frames_uninit,
    .frames_get_buffer      = &vaapi_get_buffer,
    .transfer_get_formats   = &vaapi_transfer_get_formats,
    .transfer_data_to       = &vaapi_transfer_data_to,
    .transfer_data_from     = &vaapi_transfer_data_from,
    .map_to                 = &vaapi_map_to,
    .map_from               = &vaapi_map_from,

    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE
    },
};