/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "formats.h"
#include "vulkan.h"
#include "glslang.h"
/* Generic macro for creating contexts which need to keep their addresses
 * if another context is created. */
#define FN_CREATING(ctx, type, shortname, array, num)                          \
static av_always_inline type *create_ ##shortname(ctx *dctx)                   \
{                                                                              \
    type **array, *sctx = av_mallocz(sizeof(*sctx));                           \
    if (!sctx)                                                                 \
        return NULL;                                                           \
                                                                               \
    array = av_realloc_array(dctx->array, sizeof(*dctx->array), dctx->num + 1);\
    if (!array) {                                                              \
        av_free(sctx);                                                         \
        return NULL;                                                           \
    }                                                                          \
                                                                               \
    dctx->array = array;                                                       \
    dctx->array[dctx->num++] = sctx;                                           \
                                                                               \
    return sctx;                                                               \
}
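
/* For example, the FN_CREATING(VulkanFilterContext, VkSampler, sampler,
 * samplers, samplers_num) invocation further below expands to a
 * create_sampler() helper that appends a freshly allocated VkSampler to
 * s->samplers, grows the pointer array, bumps s->samplers_num and returns
 * the stable address of the new element. */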
const VkComponentMapping ff_comp_identity_map = {
    .r = VK_COMPONENT_SWIZZLE_IDENTITY,
    .g = VK_COMPONENT_SWIZZLE_IDENTITY,
    .b = VK_COMPONENT_SWIZZLE_IDENTITY,
    .a = VK_COMPONENT_SWIZZLE_IDENTITY,
};
/* Converts return values to strings */
const char *ff_vk_ret2str(VkResult res)
{
#define CASE(VAL) case VAL: return #VAL
    switch (res) {
    CASE(VK_SUCCESS);
    CASE(VK_NOT_READY);
    CASE(VK_TIMEOUT);
    CASE(VK_EVENT_SET);
    CASE(VK_EVENT_RESET);
    CASE(VK_INCOMPLETE);
    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_NOT_PERMITTED_EXT);
    default: return "Unknown error";
    }
#undef CASE
}
static int vk_alloc_mem(AVFilterContext *avctx, VkMemoryRequirements *req,
                        VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
                        VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
{
    VkResult ret;
    int index = -1;
    VkPhysicalDeviceProperties props;
    VkPhysicalDeviceMemoryProperties mprops;
    VulkanFilterContext *s = avctx->priv;

    VkMemoryAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = alloc_extension,
    };

    vkGetPhysicalDeviceProperties(s->hwctx->phys_dev, &props);
    vkGetPhysicalDeviceMemoryProperties(s->hwctx->phys_dev, &mprops);

    /* Align if we need to */
    if (req_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
        req->size = FFALIGN(req->size, props.limits.minMemoryMapAlignment);

    alloc_info.allocationSize = req->size;

    /* The vulkan spec requires memory types to be sorted in the "optimal"
     * order, so the first matching type we find will be the best/fastest one */
    for (int i = 0; i < mprops.memoryTypeCount; i++) {
        /* The memory type must be supported by the requirements (bitfield) */
        if (!(req->memoryTypeBits & (1 << i)))
            continue;

        /* The memory type flags must include our properties */
        if ((mprops.memoryTypes[i].propertyFlags & req_flags) != req_flags)
            continue;

        /* Found a suitable memory type */
        index = i;
        break;
    }

    if (index < 0) {
        av_log(avctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
               req_flags);
        return AVERROR(EINVAL);
    }

    alloc_info.memoryTypeIndex = index;

    ret = vkAllocateMemory(s->hwctx->act_dev, &alloc_info,
                           s->hwctx->alloc, mem);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR(ENOMEM);
    }

    *mem_flags |= mprops.memoryTypes[index].propertyFlags;

    return 0;
}
int ff_vk_create_buf(AVFilterContext *avctx, FFVkBuffer *buf, size_t size,
                     VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags)
{
    int err;
    VkResult ret;
    VkMemoryRequirements req;
    VulkanFilterContext *s = avctx->priv;

    VkBufferCreateInfo buf_spawn = {
        .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext       = NULL,
        .usage       = usage,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .size        = size, /* Gets FFALIGNED during alloc if host visible
                                but should be ok */
    };

    ret = vkCreateBuffer(s->hwctx->act_dev, &buf_spawn, NULL, &buf->buf);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    vkGetBufferMemoryRequirements(s->hwctx->act_dev, buf->buf, &req);

    err = vk_alloc_mem(avctx, &req, flags, NULL, &buf->flags, &buf->mem);
    if (err)
        return err;

    ret = vkBindBufferMemory(s->hwctx->act_dev, buf->buf, buf->mem, 0);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
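
/* Usage sketch: a host-visible staging buffer could be created like this,
 * assuming avctx is an initialized Vulkan filter context:
 *
 *     FFVkBuffer buf;
 *     int err = ff_vk_create_buf(avctx, &buf, 4096,
 *                                VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
 *                                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
 *     if (err < 0)
 *         return err;
 */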
int ff_vk_map_buffers(AVFilterContext *avctx, FFVkBuffer *buf, uint8_t *mem[],
                      int nb_buffers, int invalidate)
{
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;
    VkMappedMemoryRange *inval_list = NULL;
    int inval_count = 0;

    for (int i = 0; i < nb_buffers; i++) {
        ret = vkMapMemory(s->hwctx->act_dev, buf[i].mem, 0,
                          VK_WHOLE_SIZE, 0, (void **)&mem[i]);
        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
                   ff_vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    if (!invalidate)
        return 0;

    for (int i = 0; i < nb_buffers; i++) {
        const VkMappedMemoryRange ival_buf = {
            .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = buf[i].mem,
            .size   = VK_WHOLE_SIZE,
        };
        if (buf[i].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;
        inval_list = av_fast_realloc(s->scratch, &s->scratch_size,
                                     (++inval_count)*sizeof(*inval_list));
        if (!inval_list)
            return AVERROR(ENOMEM);
        inval_list[inval_count - 1] = ival_buf;
    }

    if (inval_count) {
        ret = vkInvalidateMappedMemoryRanges(s->hwctx->act_dev, inval_count,
                                             inval_list);
        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
                   ff_vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    return 0;
}
int ff_vk_unmap_buffers(AVFilterContext *avctx, FFVkBuffer *buf, int nb_buffers,
                        int flush)
{
    int err = 0;
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;
    VkMappedMemoryRange *flush_list = NULL;
    int flush_count = 0;

    if (flush) {
        for (int i = 0; i < nb_buffers; i++) {
            const VkMappedMemoryRange flush_buf = {
                .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                .memory = buf[i].mem,
                .size   = VK_WHOLE_SIZE,
            };
            if (buf[i].flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
                continue;
            flush_list = av_fast_realloc(s->scratch, &s->scratch_size,
                                         (++flush_count)*sizeof(*flush_list));
            if (!flush_list)
                return AVERROR(ENOMEM);
            flush_list[flush_count - 1] = flush_buf;
        }
    }

    if (flush_count) {
        ret = vkFlushMappedMemoryRanges(s->hwctx->act_dev, flush_count,
                                        flush_list);
        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   ff_vk_ret2str(ret));
            err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
        }
    }

    for (int i = 0; i < nb_buffers; i++)
        vkUnmapMemory(s->hwctx->act_dev, buf[i].mem);

    return err;
}
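
/* Usage sketch: mapping the buffer created above, writing to it, then
 * flushing on unmap so the write becomes visible to the device even for
 * non-coherent memory (src_data/src_size stand in for the caller's payload):
 *
 *     uint8_t *mapped;
 *     err = ff_vk_map_buffers(avctx, &buf, &mapped, 1, 0);
 *     if (err < 0)
 *         return err;
 *     memcpy(mapped, src_data, src_size);
 *     err = ff_vk_unmap_buffers(avctx, &buf, 1, 1);
 */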
void ff_vk_free_buf(AVFilterContext *avctx, FFVkBuffer *buf)
{
    VulkanFilterContext *s = avctx->priv;
    if (!buf)
        return;

    if (buf->buf != VK_NULL_HANDLE)
        vkDestroyBuffer(s->hwctx->act_dev, buf->buf, s->hwctx->alloc);
    if (buf->mem != VK_NULL_HANDLE)
        vkFreeMemory(s->hwctx->act_dev, buf->mem, s->hwctx->alloc);
}
int ff_vk_add_push_constant(AVFilterContext *avctx, VulkanPipeline *pl,
                            int offset, int size, VkShaderStageFlagBits stage)
{
    VkPushConstantRange *pc;

    pl->push_consts = av_realloc_array(pl->push_consts, sizeof(*pl->push_consts),
                                       pl->push_consts_num + 1);
    if (!pl->push_consts)
        return AVERROR(ENOMEM);

    pc = &pl->push_consts[pl->push_consts_num++];
    memset(pc, 0, sizeof(*pc));

    pc->stageFlags = stage;
    pc->offset     = offset;
    pc->size       = size;

    return 0;
}
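
/* Usage sketch: a compute filter would typically declare one push-constant
 * struct (PushData and its fields are illustrative) and register its full
 * range before building the pipeline layout; the same stage/offset/size are
 * then reused with ff_vk_update_push_exec() while recording:
 *
 *     struct PushData { int32_t width, height; };
 *     err = ff_vk_add_push_constant(avctx, pl, 0, sizeof(struct PushData),
 *                                   VK_SHADER_STAGE_COMPUTE_BIT);
 */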
FN_CREATING(VulkanFilterContext, FFVkExecContext, exec_ctx, exec_ctx, exec_ctx_num)
int ff_vk_create_exec_ctx(AVFilterContext *avctx, FFVkExecContext **ctx, int queue)
{
    VkResult ret;
    FFVkExecContext *e;
    VulkanFilterContext *s = avctx->priv;

    VkCommandPoolCreateInfo cqueue_create = {
        .sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        .queueFamilyIndex = queue,
    };
    VkCommandBufferAllocateInfo cbuf_create = {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    VkFenceCreateInfo fence_spawn = { VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };

    e = create_exec_ctx(s);
    if (!e)
        return AVERROR(ENOMEM);

    ret = vkCreateCommandPool(s->hwctx->act_dev, &cqueue_create,
                              s->hwctx->alloc, &e->pool);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Command pool creation failure: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    cbuf_create.commandPool = e->pool;

    ret = vkAllocateCommandBuffers(s->hwctx->act_dev, &cbuf_create, &e->buf);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    ret = vkCreateFence(s->hwctx->act_dev, &fence_spawn,
                        s->hwctx->alloc, &e->fence);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create frame fence: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    vkGetDeviceQueue(s->hwctx->act_dev, queue, 0, &e->queue);

    *ctx = e;

    return 0;
}
int ff_vk_start_exec_recording(AVFilterContext *avctx, FFVkExecContext *e)
{
    VkResult ret;
    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };

    e->sem_wait_cnt = 0;
    e->sem_sig_cnt = 0;

    ret = vkBeginCommandBuffer(e->buf, &cmd_start);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to start command recording: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
int ff_vk_add_exec_dep(AVFilterContext *avctx, FFVkExecContext *e,
                       AVFrame *frame, VkPipelineStageFlagBits in_wait_dst_flag)
{
    AVVkFrame *f = (AVVkFrame *)frame->data[0];

    e->sem_wait = av_fast_realloc(e->sem_wait, &e->sem_wait_alloc,
                                  (e->sem_wait_cnt + 1)*sizeof(*e->sem_wait));
    if (!e->sem_wait)
        return AVERROR(ENOMEM);

    e->sem_wait_dst = av_fast_realloc(e->sem_wait_dst, &e->sem_wait_dst_alloc,
                                      (e->sem_wait_cnt + 1)*sizeof(*e->sem_wait_dst));
    if (!e->sem_wait_dst)
        return AVERROR(ENOMEM);

    e->sem_sig = av_fast_realloc(e->sem_sig, &e->sem_sig_alloc,
                                 (e->sem_sig_cnt + 1)*sizeof(*e->sem_sig));
    if (!e->sem_sig)
        return AVERROR(ENOMEM);

    e->sem_wait[e->sem_wait_cnt] = f->sem;
    e->sem_wait_dst[e->sem_wait_cnt] = in_wait_dst_flag;
    e->sem_wait_cnt++;

    e->sem_sig[e->sem_sig_cnt] = f->sem;
    e->sem_sig_cnt++;

    return 0;
}
int ff_vk_submit_exec_queue(AVFilterContext *avctx, FFVkExecContext *e)
{
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .commandBufferCount   = 1,
        .pCommandBuffers      = &e->buf,

        .pWaitSemaphores      = e->sem_wait,
        .pWaitDstStageMask    = e->sem_wait_dst,
        .waitSemaphoreCount   = e->sem_wait_cnt,

        .pSignalSemaphores    = e->sem_sig,
        .signalSemaphoreCount = e->sem_sig_cnt,
    };

    vkEndCommandBuffer(e->buf);

    ret = vkQueueSubmit(e->queue, 1, &s_info, e->fence);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Unable to submit command buffer: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    vkWaitForFences(s->hwctx->act_dev, 1, &e->fence, VK_TRUE, UINT64_MAX);
    vkResetFences(s->hwctx->act_dev, 1, &e->fence);

    return 0;
}
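
/* Usage sketch: one filter invocation normally follows this sequence
 * (in_frame/out_frame are the caller's AVFrames, groups_x/groups_y the
 * dispatch dimensions derived from the frame size and local workgroup size):
 *
 *     err = ff_vk_start_exec_recording(avctx, e);
 *     err = ff_vk_add_exec_dep(avctx, e, in_frame,
 *                              VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
 *     err = ff_vk_add_exec_dep(avctx, e, out_frame,
 *                              VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
 *     ff_vk_bind_pipeline_exec(avctx, e, pl);
 *     vkCmdDispatch(e->buf, groups_x, groups_y, 1);
 *     err = ff_vk_submit_exec_queue(avctx, e);
 */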
int ff_vk_filter_query_formats(AVFilterContext *avctx)
{
    static const enum AVPixelFormat pixel_formats[] = {
        AV_PIX_FMT_VULKAN, AV_PIX_FMT_NONE,
    };
    AVFilterFormats *pix_fmts = ff_make_format_list(pixel_formats);
    if (!pix_fmts)
        return AVERROR(ENOMEM);

    return ff_set_common_formats(avctx, pix_fmts);
}
static int vulkan_filter_set_device(AVFilterContext *avctx,
                                    AVBufferRef *device)
{
    VulkanFilterContext *s = avctx->priv;

    av_buffer_unref(&s->device_ref);

    s->device_ref = av_buffer_ref(device);
    if (!s->device_ref)
        return AVERROR(ENOMEM);

    s->device = (AVHWDeviceContext*)s->device_ref->data;
    s->hwctx  = s->device->hwctx;

    return 0;
}

static int vulkan_filter_set_frames(AVFilterContext *avctx,
                                    AVBufferRef *frames)
{
    VulkanFilterContext *s = avctx->priv;

    av_buffer_unref(&s->frames_ref);

    s->frames_ref = av_buffer_ref(frames);
    if (!s->frames_ref)
        return AVERROR(ENOMEM);

    return 0;
}
int ff_vk_filter_config_input(AVFilterLink *inlink)
{
    int err;
    AVFilterContext *avctx = inlink->dst;
    VulkanFilterContext *s = avctx->priv;
    AVHWFramesContext *input_frames;

    if (!inlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "Vulkan filtering requires a "
               "hardware frames context on the input.\n");
        return AVERROR(EINVAL);
    }

    /* Extract the device and default output format from the first input. */
    if (avctx->inputs[0] != inlink)
        return 0;

    input_frames = (AVHWFramesContext*)inlink->hw_frames_ctx->data;
    if (input_frames->format != AV_PIX_FMT_VULKAN)
        return AVERROR(EINVAL);

    err = vulkan_filter_set_device(avctx, input_frames->device_ref);
    if (err < 0)
        return err;

    err = vulkan_filter_set_frames(avctx, inlink->hw_frames_ctx);
    if (err < 0)
        return err;

    /* Default output parameters match input parameters. */
    s->input_format = input_frames->sw_format;
    if (s->output_format == AV_PIX_FMT_NONE)
        s->output_format = input_frames->sw_format;
    if (!s->output_width)
        s->output_width  = inlink->w;
    if (!s->output_height)
        s->output_height = inlink->h;

    return 0;
}
int ff_vk_filter_config_output_inplace(AVFilterLink *outlink)
{
    int err;
    AVFilterContext *avctx = outlink->src;
    VulkanFilterContext *s = avctx->priv;

    av_buffer_unref(&outlink->hw_frames_ctx);

    if (!s->device_ref) {
        if (!avctx->hw_device_ctx) {
            av_log(avctx, AV_LOG_ERROR, "Vulkan filtering requires a "
                   "Vulkan device.\n");
            return AVERROR(EINVAL);
        }

        err = vulkan_filter_set_device(avctx, avctx->hw_device_ctx);
        if (err < 0)
            return err;
    }

    outlink->hw_frames_ctx = av_buffer_ref(s->frames_ref);
    if (!outlink->hw_frames_ctx)
        return AVERROR(ENOMEM);

    outlink->w = s->output_width;
    outlink->h = s->output_height;

    return 0;
}
int ff_vk_filter_config_output(AVFilterLink *outlink)
{
    int err;
    AVFilterContext *avctx = outlink->src;
    VulkanFilterContext *s = avctx->priv;
    AVBufferRef *output_frames_ref;
    AVHWFramesContext *output_frames;

    av_buffer_unref(&outlink->hw_frames_ctx);

    if (!s->device_ref) {
        if (!avctx->hw_device_ctx) {
            av_log(avctx, AV_LOG_ERROR, "Vulkan filtering requires a "
                   "Vulkan device.\n");
            return AVERROR(EINVAL);
        }

        err = vulkan_filter_set_device(avctx, avctx->hw_device_ctx);
        if (err < 0)
            return err;
    }

    output_frames_ref = av_hwframe_ctx_alloc(s->device_ref);
    if (!output_frames_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    output_frames = (AVHWFramesContext*)output_frames_ref->data;

    output_frames->format    = AV_PIX_FMT_VULKAN;
    output_frames->sw_format = s->output_format;
    output_frames->width     = s->output_width;
    output_frames->height    = s->output_height;

    err = av_hwframe_ctx_init(output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise output "
               "frames: %d.\n", err);
        goto fail;
    }

    outlink->hw_frames_ctx = output_frames_ref;
    outlink->w = s->output_width;
    outlink->h = s->output_height;

    return 0;
fail:
    av_buffer_unref(&output_frames_ref);
    return err;
}
int ff_vk_filter_init(AVFilterContext *avctx)
{
    VulkanFilterContext *s = avctx->priv;

    s->output_format = AV_PIX_FMT_NONE;

    if (glslang_init())
        return AVERROR_EXTERNAL;

    return 0;
}
FN_CREATING(VulkanFilterContext, VkSampler, sampler, samplers, samplers_num)
VkSampler *ff_vk_init_sampler(AVFilterContext *avctx, int unnorm_coords,
                              VkFilter filt)
{
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;

    VkSamplerCreateInfo sampler_info = {
        .sType                   = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        .magFilter               = filt,
        .minFilter               = sampler_info.magFilter,
        .mipmapMode              = unnorm_coords ? VK_SAMPLER_MIPMAP_MODE_NEAREST :
                                                   VK_SAMPLER_MIPMAP_MODE_LINEAR,
        .addressModeU            = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
        .addressModeV            = sampler_info.addressModeU,
        .addressModeW            = sampler_info.addressModeU,
        .anisotropyEnable        = VK_FALSE,
        .compareOp               = VK_COMPARE_OP_NEVER,
        .borderColor             = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
        .unnormalizedCoordinates = unnorm_coords,
    };

    VkSampler *sampler = create_sampler(s);
    if (!sampler)
        return NULL;

    ret = vkCreateSampler(s->hwctx->act_dev, &sampler_info,
                          s->hwctx->alloc, sampler);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Unable to init sampler: %s\n",
               ff_vk_ret2str(ret));
        return NULL;
    }

    return sampler;
}
int ff_vk_mt_is_np_rgb(enum AVPixelFormat pix_fmt)
{
    if (pix_fmt == AV_PIX_FMT_ABGR   || pix_fmt == AV_PIX_FMT_BGRA   ||
        pix_fmt == AV_PIX_FMT_RGBA   || pix_fmt == AV_PIX_FMT_RGB24  ||
        pix_fmt == AV_PIX_FMT_BGR24  || pix_fmt == AV_PIX_FMT_RGB48  ||
        pix_fmt == AV_PIX_FMT_RGBA64 || pix_fmt == AV_PIX_FMT_RGB565 ||
        pix_fmt == AV_PIX_FMT_BGR565 || pix_fmt == AV_PIX_FMT_BGR0   ||
        pix_fmt == AV_PIX_FMT_0BGR   || pix_fmt == AV_PIX_FMT_RGB0)
        return 1;
    return 0;
}
const char *ff_vk_shader_rep_fmt(enum AVPixelFormat pixfmt)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pixfmt);
    const int high = desc->comp[0].depth > 8;
    return high ? "rgba16f" : "rgba8";
}
int ff_vk_create_imageview(AVFilterContext *avctx, VkImageView *v, VkImage img,
                           VkFormat fmt, const VkComponentMapping map)
{
    VulkanFilterContext *s = avctx->priv;
    VkImageViewCreateInfo imgview_spawn = {
        .sType      = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .pNext      = NULL,
        .image      = img,
        .viewType   = VK_IMAGE_VIEW_TYPE_2D,
        .format     = fmt,
        .components = map,
        .subresourceRange = {
            .aspectMask     = VK_IMAGE_ASPECT_COLOR_BIT,
            .baseMipLevel   = 0,
            .levelCount     = 1,
            .baseArrayLayer = 0,
            .layerCount     = 1,
        },
    };

    VkResult ret = vkCreateImageView(s->hwctx->act_dev, &imgview_spawn,
                                     s->hwctx->alloc, v);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create imageview: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
void ff_vk_destroy_imageview(AVFilterContext *avctx, VkImageView *v)
{
    VulkanFilterContext *s = avctx->priv;
    if (v && *v) {
        vkDestroyImageView(s->hwctx->act_dev, *v, s->hwctx->alloc);
        *v = VK_NULL_HANDLE;
    }
}
FN_CREATING(VulkanPipeline, SPIRVShader, shader, shaders, shaders_num)
SPIRVShader *ff_vk_init_shader(AVFilterContext *avctx, VulkanPipeline *pl,
                               const char *name, VkShaderStageFlags stage)
{
    SPIRVShader *shd = create_shader(pl);
    if (!shd)
        return NULL;

    av_bprint_init(&shd->src, 0, AV_BPRINT_SIZE_UNLIMITED);

    shd->shader.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shd->shader.stage = stage;

    shd->name = name;

    GLSLF(0, #version %i                                                 ,460);
    GLSLC(0, #define IS_WITHIN(v1, v2) ((v1.x < v2.x) && (v1.y < v2.y))      );
    GLSLC(0,                                                                 );

    return shd;
}
void ff_vk_set_compute_shader_sizes(AVFilterContext *avctx, SPIRVShader *shd,
                                    int local_size[3])
{
    shd->local_size[0] = local_size[0];
    shd->local_size[1] = local_size[1];
    shd->local_size[2] = local_size[2];

    av_bprintf(&shd->src, "layout (local_size_x = %i, "
               "local_size_y = %i, local_size_z = %i) in;\n\n",
               shd->local_size[0], shd->local_size[1], shd->local_size[2]);
}
static void print_shader(AVFilterContext *avctx, SPIRVShader *shd, int prio)
{
    int line = 0;
    const char *p = shd->src.str;
    const char *start = p;
    const size_t len = strlen(p);

    AVBPrint buf;
    av_bprint_init(&buf, 0, AV_BPRINT_SIZE_UNLIMITED);

    for (size_t i = 0; i < len; i++) {
        if (p[i] == '\n') {
            av_bprintf(&buf, "%i\t", ++line);
            av_bprint_append_data(&buf, start, &p[i] - start + 1);
            start = &p[i + 1];
        }
    }

    av_log(avctx, prio, "Shader %s: \n%s", shd->name, buf.str);
    av_bprint_finalize(&buf, NULL);
}
int ff_vk_compile_shader(AVFilterContext *avctx, SPIRVShader *shd,
                         const char *entrypoint)
{
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;
    VkShaderModuleCreateInfo shader_create;
    GLSlangResult *res;

    static const enum GLSlangStage emap[] = {
        [VK_SHADER_STAGE_VERTEX_BIT]   = GLSLANG_VERTEX,
        [VK_SHADER_STAGE_FRAGMENT_BIT] = GLSLANG_FRAGMENT,
        [VK_SHADER_STAGE_COMPUTE_BIT]  = GLSLANG_COMPUTE,
    };

    shd->shader.pName = entrypoint;

    res = glslang_compile(shd->src.str, emap[shd->shader.stage]);
    if (!res)
        return AVERROR(ENOMEM);

    if (res->rval) {
        av_log(avctx, AV_LOG_ERROR, "Error compiling shader %s: %s!\n",
               shd->name, av_err2str(res->rval));
        print_shader(avctx, shd, AV_LOG_ERROR);
        if (res->error_msg)
            av_log(avctx, AV_LOG_ERROR, "%s", res->error_msg);
        av_free(res->error_msg);
        return res->rval;
    }

    print_shader(avctx, shd, AV_LOG_VERBOSE);

    shader_create.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    shader_create.pNext    = NULL;
    shader_create.codeSize = res->size;
    shader_create.flags    = 0;
    shader_create.pCode    = res->data;

    ret = vkCreateShaderModule(s->hwctx->act_dev, &shader_create, NULL,
                               &shd->shader.module);

    /* Free the GLSlangResult struct */
    av_free(res);

    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Unable to create shader module: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    av_log(avctx, AV_LOG_VERBOSE, "Shader %s linked! Size: %zu bytes\n",
           shd->name, shader_create.codeSize);

    return 0;
}
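
/* Usage sketch: building a minimal compute shader with the helpers above
 * (the shader name and GLSL body are illustrative):
 *
 *     SPIRVShader *shd = ff_vk_init_shader(avctx, pl, "my_filter_compute",
 *                                          VK_SHADER_STAGE_COMPUTE_BIT);
 *     ff_vk_set_compute_shader_sizes(avctx, shd, (int [3]){ 32, 32, 1 });
 *     GLSLC(0, void main()                                          );
 *     GLSLC(0, {                                                    );
 *     GLSLC(1,     ivec2 pos = ivec2(gl_GlobalInvocationID.xy);     );
 *     GLSLC(0, }                                                    );
 *     err = ff_vk_compile_shader(avctx, shd, "main");
 */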
static const struct descriptor_props {
    size_t struct_size; /* Size of the opaque which updates the descriptor */
    const char *type;
    int is_uniform;
    int mem_quali;      /* Can use a memory qualifier */
    int dim_needed;     /* Must indicate dimension */
    int buf_content;    /* Must indicate buffer contents */
} descriptor_props[] = {
    [VK_DESCRIPTOR_TYPE_SAMPLER]                = { sizeof(VkDescriptorImageInfo),  "sampler",       1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE]          = { sizeof(VkDescriptorImageInfo),  "texture",       1, 0, 1, 0, },
    [VK_DESCRIPTOR_TYPE_STORAGE_IMAGE]          = { sizeof(VkDescriptorImageInfo),  "image",         1, 1, 1, 0, },
    [VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT]       = { sizeof(VkDescriptorImageInfo),  "subpassInput",  1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] = { sizeof(VkDescriptorImageInfo),  "sampler",       1, 0, 1, 0, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]         = { sizeof(VkDescriptorBufferInfo),  NULL,           1, 0, 0, 1, },
    [VK_DESCRIPTOR_TYPE_STORAGE_BUFFER]         = { sizeof(VkDescriptorBufferInfo), "buffer",        0, 1, 0, 1, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] = { sizeof(VkDescriptorBufferInfo),  NULL,           1, 0, 0, 1, },
    [VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] = { sizeof(VkDescriptorBufferInfo), "buffer",        0, 1, 0, 1, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER]   = { sizeof(VkBufferView),           "samplerBuffer", 1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER]   = { sizeof(VkBufferView),           "imageBuffer",   1, 0, 0, 0, },
};
int ff_vk_add_descriptor_set(AVFilterContext *avctx, VulkanPipeline *pl,
                             SPIRVShader *shd, VulkanDescriptorSetBinding *desc,
                             int num, int only_print_to_shader)
{
    VkResult ret;
    VkDescriptorSetLayout *layout;
    VulkanFilterContext *s = avctx->priv;

    if (only_print_to_shader)
        goto print;

    pl->desc_layout = av_realloc_array(pl->desc_layout, sizeof(*pl->desc_layout),
                                       pl->descriptor_sets_num + 1);
    if (!pl->desc_layout)
        return AVERROR(ENOMEM);

    layout = &pl->desc_layout[pl->descriptor_sets_num];
    memset(layout, 0, sizeof(*layout));

    { /* Create descriptor set layout descriptions */
        VkDescriptorSetLayoutCreateInfo desc_create_layout = { 0 };
        VkDescriptorSetLayoutBinding *desc_binding;

        desc_binding = av_mallocz(sizeof(*desc_binding)*num);
        if (!desc_binding)
            return AVERROR(ENOMEM);

        for (int i = 0; i < num; i++) {
            desc_binding[i].binding            = i;
            desc_binding[i].descriptorType     = desc[i].type;
            desc_binding[i].descriptorCount    = FFMAX(desc[i].elems, 1);
            desc_binding[i].stageFlags         = desc[i].stages;
            desc_binding[i].pImmutableSamplers = desc[i].samplers;
        }

        desc_create_layout.sType        = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
        desc_create_layout.pBindings    = desc_binding;
        desc_create_layout.bindingCount = num;

        ret = vkCreateDescriptorSetLayout(s->hwctx->act_dev, &desc_create_layout,
                                          s->hwctx->alloc, layout);
        av_free(desc_binding);
        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Unable to init descriptor set "
                   "layout: %s\n", ff_vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    { /* Pool each descriptor by type and update pool counts */
        for (int i = 0; i < num; i++) {
            int j;
            for (j = 0; j < pl->pool_size_desc_num; j++)
                if (pl->pool_size_desc[j].type == desc[i].type)
                    break;
            if (j >= pl->pool_size_desc_num) {
                pl->pool_size_desc = av_realloc_array(pl->pool_size_desc,
                                                      sizeof(*pl->pool_size_desc),
                                                      ++pl->pool_size_desc_num);
                if (!pl->pool_size_desc)
                    return AVERROR(ENOMEM);
                memset(&pl->pool_size_desc[j], 0, sizeof(VkDescriptorPoolSize));
            }
            pl->pool_size_desc[j].type             = desc[i].type;
            pl->pool_size_desc[j].descriptorCount += FFMAX(desc[i].elems, 1);
        }
    }

    { /* Create template creation struct */
        VkDescriptorUpdateTemplateCreateInfo *dt;
        VkDescriptorUpdateTemplateEntry *des_entries;

        /* Freed after descriptor set initialization */
        des_entries = av_mallocz(num*sizeof(VkDescriptorUpdateTemplateEntry));
        if (!des_entries)
            return AVERROR(ENOMEM);

        for (int i = 0; i < num; i++) {
            des_entries[i].dstBinding      = i;
            des_entries[i].descriptorType  = desc[i].type;
            des_entries[i].descriptorCount = FFMAX(desc[i].elems, 1);
            des_entries[i].dstArrayElement = 0;
            des_entries[i].offset          = ((uint8_t *)desc[i].updater) - (uint8_t *)s;
            des_entries[i].stride          = descriptor_props[desc[i].type].struct_size;
        }

        pl->desc_template_info = av_realloc_array(pl->desc_template_info,
                                                  sizeof(*pl->desc_template_info),
                                                  pl->descriptor_sets_num + 1);
        if (!pl->desc_template_info)
            return AVERROR(ENOMEM);

        dt = &pl->desc_template_info[pl->descriptor_sets_num];
        memset(dt, 0, sizeof(*dt));

        dt->sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
        dt->templateType               = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET;
        dt->descriptorSetLayout        = *layout;
        dt->pDescriptorUpdateEntries   = des_entries;
        dt->descriptorUpdateEntryCount = num;
    }

    pl->descriptor_sets_num++;

print:
    /* Write shader info */
    for (int i = 0; i < num; i++) {
        const struct descriptor_props *prop = &descriptor_props[desc[i].type];
        GLSLA("layout (set = %i, binding = %i", pl->descriptor_sets_num - 1, i);

        if (desc[i].mem_layout)
            GLSLA(", %s", desc[i].mem_layout);
        GLSLA(")");

        if (prop->is_uniform)
            GLSLA(" uniform");

        if (prop->mem_quali && desc[i].mem_quali)
            GLSLA(" %s", desc[i].mem_quali);

        if (prop->type)
            GLSLA(" %s", prop->type);

        if (prop->dim_needed)
            GLSLA("%iD", desc[i].dimensions);

        GLSLA(" %s", desc[i].name);

        if (prop->buf_content)
            GLSLA(" {\n    %s\n}", desc[i].buf_content);
        else if (desc[i].elems > 0)
            GLSLA("[%i]", desc[i].elems);

        GLSLA(";\n");
    }
    GLSLA("\n");

    return 0;
}
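
/* Usage sketch: one storage-image binding and the GLSL declaration the print
 * path above emits for it (the binding name and updater field are
 * illustrative):
 *
 *     VulkanDescriptorSetBinding desc = {
 *         .name       = "output_img",
 *         .type       = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
 *         .mem_layout = "rgba8",
 *         .mem_quali  = "writeonly",
 *         .dimensions = 2,
 *         .stages     = VK_SHADER_STAGE_COMPUTE_BIT,
 *         .updater    = &s->output_images,
 *     };
 *     err = ff_vk_add_descriptor_set(avctx, pl, shd, &desc, 1, 0);
 *
 * appends to the shader source:
 *
 *     layout (set = 0, binding = 0, rgba8) uniform writeonly image2D output_img;
 */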
void ff_vk_update_descriptor_set(AVFilterContext *avctx, VulkanPipeline *pl,
                                 int set_id)
{
    VulkanFilterContext *s = avctx->priv;

    vkUpdateDescriptorSetWithTemplate(s->hwctx->act_dev,
                                      pl->desc_set[set_id],
                                      pl->desc_template[set_id], s);
}

void ff_vk_update_push_exec(AVFilterContext *avctx, FFVkExecContext *e,
                            VkShaderStageFlagBits stage, int offset,
                            size_t size, void *src)
{
    vkCmdPushConstants(e->buf, e->bound_pl->pipeline_layout,
                       stage, offset, size, src);
}
int ff_vk_init_pipeline_layout(AVFilterContext *avctx, VulkanPipeline *pl)
{
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;

    { /* Init descriptor set pool */
        VkDescriptorPoolCreateInfo pool_create_info = {
            .sType         = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
            .poolSizeCount = pl->pool_size_desc_num,
            .pPoolSizes    = pl->pool_size_desc,
            .maxSets       = pl->descriptor_sets_num,
        };

        ret = vkCreateDescriptorPool(s->hwctx->act_dev, &pool_create_info,
                                     s->hwctx->alloc, &pl->desc_pool);
        av_freep(&pl->pool_size_desc);

        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Unable to init descriptor set "
                   "pool: %s\n", ff_vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    { /* Allocate descriptor sets */
        VkDescriptorSetAllocateInfo alloc_info = {
            .sType              = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
            .descriptorPool     = pl->desc_pool,
            .descriptorSetCount = pl->descriptor_sets_num,
            .pSetLayouts        = pl->desc_layout,
        };

        pl->desc_set = av_malloc(pl->descriptor_sets_num*sizeof(*pl->desc_set));
        if (!pl->desc_set)
            return AVERROR(ENOMEM);

        ret = vkAllocateDescriptorSets(s->hwctx->act_dev, &alloc_info,
                                       pl->desc_set);
        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Unable to allocate descriptor set: %s\n",
                   ff_vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    { /* Finally create the pipeline layout */
        VkPipelineLayoutCreateInfo spawn_pipeline_layout = {
            .sType                  = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
            .setLayoutCount         = pl->descriptor_sets_num,
            .pSetLayouts            = pl->desc_layout,
            .pushConstantRangeCount = pl->push_consts_num,
            .pPushConstantRanges    = pl->push_consts,
        };

        ret = vkCreatePipelineLayout(s->hwctx->act_dev, &spawn_pipeline_layout,
                                     s->hwctx->alloc, &pl->pipeline_layout);
        av_freep(&pl->push_consts);
        pl->push_consts_num = 0;
        if (ret != VK_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Unable to init pipeline layout: %s\n",
                   ff_vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    { /* Descriptor template (for tightly packed descriptors) */
        VkDescriptorUpdateTemplateCreateInfo *desc_template_info;

        pl->desc_template = av_malloc(pl->descriptor_sets_num*sizeof(*pl->desc_template));
        if (!pl->desc_template)
            return AVERROR(ENOMEM);

        /* Create update templates for the descriptor sets */
        for (int i = 0; i < pl->descriptor_sets_num; i++) {
            desc_template_info = &pl->desc_template_info[i];
            desc_template_info->pipelineLayout = pl->pipeline_layout;
            ret = vkCreateDescriptorUpdateTemplate(s->hwctx->act_dev,
                                                   desc_template_info,
                                                   s->hwctx->alloc,
                                                   &pl->desc_template[i]);
            av_free((void *)desc_template_info->pDescriptorUpdateEntries);
            if (ret != VK_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Unable to init descriptor "
                       "template: %s\n", ff_vk_ret2str(ret));
                return AVERROR_EXTERNAL;
            }
        }

        av_freep(&pl->desc_template_info);
    }

    return 0;
}
FN_CREATING(VulkanFilterContext, VulkanPipeline, pipeline, pipelines, pipelines_num)
VulkanPipeline *ff_vk_create_pipeline(AVFilterContext *avctx)
{
    return create_pipeline(avctx->priv);
}

int ff_vk_init_compute_pipeline(AVFilterContext *avctx, VulkanPipeline *pl)
{
    int i;
    VkResult ret;
    VulkanFilterContext *s = avctx->priv;

    VkComputePipelineCreateInfo pipe = {
        .sType  = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        .layout = pl->pipeline_layout,
    };

    for (i = 0; i < pl->shaders_num; i++) {
        if (pl->shaders[i]->shader.stage & VK_SHADER_STAGE_COMPUTE_BIT) {
            pipe.stage = pl->shaders[i]->shader;
            break;
        }
    }
    if (i == pl->shaders_num) {
        av_log(avctx, AV_LOG_ERROR, "Can't init compute pipeline, no shader\n");
        return AVERROR(EINVAL);
    }

    ret = vkCreateComputePipelines(s->hwctx->act_dev, VK_NULL_HANDLE, 1, &pipe,
                                   s->hwctx->alloc, &pl->pipeline);
    if (ret != VK_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Unable to init compute pipeline: %s\n",
               ff_vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    pl->bind_point = VK_PIPELINE_BIND_POINT_COMPUTE;

    return 0;
}
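
/* Usage sketch: the expected construction order during filter init,
 * using only the helpers defined in this file:
 *
 *     pl  = ff_vk_create_pipeline(avctx);
 *     shd = ff_vk_init_shader(avctx, pl, name, stage);   // write GLSL
 *     err = ff_vk_add_descriptor_set(avctx, pl, shd, desc, num, 0);
 *     err = ff_vk_add_push_constant(avctx, pl, 0, size, stage); // optional
 *     err = ff_vk_compile_shader(avctx, shd, "main");
 *     err = ff_vk_init_pipeline_layout(avctx, pl);
 *     err = ff_vk_init_compute_pipeline(avctx, pl);
 */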
void ff_vk_bind_pipeline_exec(AVFilterContext *avctx, FFVkExecContext *e,
                              VulkanPipeline *pl)
{
    vkCmdBindPipeline(e->buf, pl->bind_point, pl->pipeline);

    vkCmdBindDescriptorSets(e->buf, pl->bind_point, pl->pipeline_layout, 0,
                            pl->descriptor_sets_num, pl->desc_set, 0, 0);

    e->bound_pl = pl;
}
static void free_exec_ctx(VulkanFilterContext *s, FFVkExecContext *e)
{
    vkDestroyFence(s->hwctx->act_dev, e->fence, s->hwctx->alloc);

    if (e->buf != VK_NULL_HANDLE)
        vkFreeCommandBuffers(s->hwctx->act_dev, e->pool, 1, &e->buf);
    if (e->pool != VK_NULL_HANDLE)
        vkDestroyCommandPool(s->hwctx->act_dev, e->pool, s->hwctx->alloc);

    av_free(e->sem_wait);
    av_free(e->sem_wait_dst);
    av_free(e->sem_sig);

    av_free(e);
}
static void free_pipeline(VulkanFilterContext *s, VulkanPipeline *pl)
{
    for (int i = 0; i < pl->shaders_num; i++) {
        SPIRVShader *shd = pl->shaders[i];
        av_bprint_finalize(&shd->src, NULL);
        vkDestroyShaderModule(s->hwctx->act_dev, shd->shader.module,
                              s->hwctx->alloc);
        av_free(shd);
    }

    vkDestroyPipeline(s->hwctx->act_dev, pl->pipeline, s->hwctx->alloc);
    vkDestroyPipelineLayout(s->hwctx->act_dev, pl->pipeline_layout,
                            s->hwctx->alloc);

    for (int i = 0; i < pl->descriptor_sets_num; i++) {
        if (pl->desc_template && pl->desc_template[i])
            vkDestroyDescriptorUpdateTemplate(s->hwctx->act_dev, pl->desc_template[i],
                                              s->hwctx->alloc);
        if (pl->desc_layout && pl->desc_layout[i])
            vkDestroyDescriptorSetLayout(s->hwctx->act_dev, pl->desc_layout[i],
                                         s->hwctx->alloc);
    }

    /* Also frees the descriptor sets */
    if (pl->desc_pool)
        vkDestroyDescriptorPool(s->hwctx->act_dev, pl->desc_pool,
                                s->hwctx->alloc);

    av_freep(&pl->desc_set);
    av_freep(&pl->shaders);
    av_freep(&pl->desc_layout);
    av_freep(&pl->desc_template);
    av_freep(&pl->push_consts);
    pl->push_consts_num = 0;

    /* Only freed in case of failure */
    av_freep(&pl->pool_size_desc);
    if (pl->desc_template_info) {
        for (int i = 0; i < pl->descriptor_sets_num; i++)
            av_free((void *)pl->desc_template_info[i].pDescriptorUpdateEntries);
        av_freep(&pl->desc_template_info);
    }

    av_free(pl);
}
void ff_vk_filter_uninit(AVFilterContext *avctx)
{
    VulkanFilterContext *s = avctx->priv;

    glslang_uninit();

    for (int i = 0; i < s->samplers_num; i++)
        vkDestroySampler(s->hwctx->act_dev, *s->samplers[i], s->hwctx->alloc);
    av_freep(&s->samplers);

    for (int i = 0; i < s->pipelines_num; i++)
        free_pipeline(s, s->pipelines[i]);
    av_freep(&s->pipelines);

    for (int i = 0; i < s->exec_ctx_num; i++)
        free_exec_ctx(s, s->exec_ctx[i]);
    av_freep(&s->exec_ctx);

    av_freep(&s->scratch);
    s->scratch_size = 0;

    av_buffer_unref(&s->device_ref);
    av_buffer_unref(&s->frames_ref);
}