/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#define VK_NO_PROTOTYPES
#define VK_ENABLE_BETA_EXTENSIONS

#ifdef _WIN32
#include <windows.h> /* Included to prevent conflicts with CreateSemaphore */
#include <versionhelpers.h>
#include "compat/w32dlfcn.h"
#else
#include <dlfcn.h>
#endif

#include <unistd.h>

#include "config.h"
#include "pixdesc.h"
#include "avstring.h"
#include "imgutils.h"
#include "hwcontext.h"
#include "avassert.h"
#include "hwcontext_internal.h"
#include "hwcontext_vulkan.h"

#include "vulkan.h"
#include "vulkan_loader.h"

#if CONFIG_LIBDRM
#include <xf86drm.h>
#include <drm_fourcc.h>
#include "hwcontext_drm.h"
#if CONFIG_VAAPI
#include <va/va_drmcommon.h>
#include "hwcontext_vaapi.h"
#endif
#endif

#if CONFIG_CUDA
#include "hwcontext_cuda_internal.h"
#include "cuda_check.h"
#define CHECK_CU(x) FF_CUDA_CHECK_DL(cuda_cu, cu, x)
#endif

typedef struct VulkanQueueCtx {
    VkFence fence;
    VkQueue queue;
    int was_synchronous;

    /* Buffer dependencies */
    AVBufferRef **buf_deps;
    int nb_buf_deps;
    int buf_deps_alloc_size;
} VulkanQueueCtx;

typedef struct VulkanExecCtx {
    VkCommandPool pool;
    VkCommandBuffer *bufs;
    VulkanQueueCtx *queues;
    int nb_queues;
    int cur_queue_idx;
} VulkanExecCtx;
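
/*
 * Note: an execution context couples one command pool with per-queue state.
 * The intended pattern (see the helpers further down in this file) is
 * roughly: wait_start_exec_ctx() -> record into the buffer returned by
 * get_buf_exec_ctx() -> submit_exec_ctx(). Submissions rotate through the
 * nb_queues slots, each guarded by its own fence, so multiple transfers can
 * be in flight at once.
 */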

typedef struct VulkanDevicePriv {
    /* Vulkan library and loader functions */
    void *libvulkan;
    FFVulkanFunctions vkfn;

    /* Properties */
    VkPhysicalDeviceProperties2 props;
    VkPhysicalDeviceMemoryProperties mprops;
    VkPhysicalDeviceExternalMemoryHostPropertiesEXT hprops;

    /* Features */
    VkPhysicalDeviceVulkan11Features device_features_1_1;
    VkPhysicalDeviceVulkan12Features device_features_1_2;

    /* Queues */
    uint32_t qfs[5];
    int num_qfs;

    /* Debug callback */
    VkDebugUtilsMessengerEXT debug_ctx;

    /* Extensions */
    FFVulkanExtensions extensions;

    /* Settings */
    int use_linear_images;

    /* Option to allocate all image planes in a single allocation */
    int contiguous_planes;

    /* Nvidia */
    int dev_is_nvidia;

    /* Intel */
    int dev_is_intel;
} VulkanDevicePriv;

typedef struct VulkanFramesPriv {
    /* Image conversions */
    VulkanExecCtx conv_ctx;

    /* Image transfers */
    VulkanExecCtx upload_ctx;
    VulkanExecCtx download_ctx;

    /* Modifier info list to free at uninit */
    VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
} VulkanFramesPriv;

typedef struct AVVkFrameInternal {
#if CONFIG_CUDA
    /* Importing external memory into cuda is really expensive so we keep the
     * memory imported all the time */
    AVBufferRef *cuda_fc_ref; /* Need to keep it around for uninit */
    CUexternalMemory ext_mem[AV_NUM_DATA_POINTERS];
    CUmipmappedArray cu_mma[AV_NUM_DATA_POINTERS];
    CUarray cu_array[AV_NUM_DATA_POINTERS];
    CUexternalSemaphore cu_sem[AV_NUM_DATA_POINTERS];
#ifdef _WIN32
    HANDLE ext_mem_handle[AV_NUM_DATA_POINTERS];
    HANDLE ext_sem_handle[AV_NUM_DATA_POINTERS];
#endif
#endif
} AVVkFrameInternal;

#define ADD_VAL_TO_LIST(list, count, val)                                      \
    do {                                                                       \
        list = av_realloc_array(list, sizeof(*list), ++count);                 \
        if (!list) {                                                           \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
        list[count - 1] = av_strdup(val);                                      \
        if (!list[count - 1]) {                                                \
            err = AVERROR(ENOMEM);                                             \
            goto fail;                                                         \
        }                                                                      \
    } while(0)

#define RELEASE_PROPS(props, count)                                            \
    if (props) {                                                               \
        for (int i = 0; i < count; i++)                                        \
            av_free((void *)((props)[i]));                                     \
        av_free((void *)props);                                                \
    }
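
/*
 * Note: ADD_VAL_TO_LIST expects an `err` variable and a `fail:` label in the
 * calling scope, and RELEASE_PROPS is its matching cleanup. Be aware that if
 * av_realloc_array() fails, `list` is overwritten with NULL before the jump
 * to fail, so the previously allocated array is no longer reachable there.
 */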

static const struct {
    enum AVPixelFormat pixfmt;
    const VkFormat vkfmts[4];
} vk_pixfmt_map[] = {
    { AV_PIX_FMT_GRAY8,   { VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_GRAY16,  { VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_GRAYF32, { VK_FORMAT_R32_SFLOAT } },

    { AV_PIX_FMT_NV12, { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_NV21, { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_P010, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
    { AV_PIX_FMT_P012, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
    { AV_PIX_FMT_P016, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },

    { AV_PIX_FMT_NV16, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },

    { AV_PIX_FMT_NV24, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },
    { AV_PIX_FMT_NV42, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8G8_UNORM } },

    { AV_PIX_FMT_YUV420P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV420P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV420P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUV422P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV422P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV422P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUV444P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUV444P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV444P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUV444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUVA420P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUVA420P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    /* There is no AV_PIX_FMT_YUVA420P12 */
    { AV_PIX_FMT_YUVA420P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUVA422P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUVA422P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA422P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA422P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_YUVA444P,   { VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM,  VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_YUVA444P10, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA444P12, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_YUVA444P16, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },

    { AV_PIX_FMT_VUYX, { VK_FORMAT_R8G8B8A8_UNORM } },
    { AV_PIX_FMT_XV36, { VK_FORMAT_R16G16B16A16_UNORM } },

    { AV_PIX_FMT_BGRA,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_RGBA,   { VK_FORMAT_R8G8B8A8_UNORM } },
    { AV_PIX_FMT_RGB24,  { VK_FORMAT_R8G8B8_UNORM } },
    { AV_PIX_FMT_BGR24,  { VK_FORMAT_B8G8R8_UNORM } },
    { AV_PIX_FMT_RGB48,  { VK_FORMAT_R16G16B16_UNORM } },
    { AV_PIX_FMT_RGBA64, { VK_FORMAT_R16G16B16A16_UNORM } },
    { AV_PIX_FMT_RGB565, { VK_FORMAT_R5G6B5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR565, { VK_FORMAT_B5G6R5_UNORM_PACK16 } },
    { AV_PIX_FMT_BGR0,   { VK_FORMAT_B8G8R8A8_UNORM } },
    { AV_PIX_FMT_RGB0,   { VK_FORMAT_R8G8B8A8_UNORM } },

    /* Lower priority as there's an endianness-dependent overlap between these
     * and rgba/bgr0, and PACK32 formats are more limited */
    { AV_PIX_FMT_BGR32,  { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },
    { AV_PIX_FMT_0BGR32, { VK_FORMAT_A8B8G8R8_UNORM_PACK32 } },

    { AV_PIX_FMT_X2RGB10, { VK_FORMAT_A2R10G10B10_UNORM_PACK32 } },

    { AV_PIX_FMT_GBRAP,    { VK_FORMAT_R8_UNORM,   VK_FORMAT_R8_UNORM,   VK_FORMAT_R8_UNORM,   VK_FORMAT_R8_UNORM } },
    { AV_PIX_FMT_GBRAP16,  { VK_FORMAT_R16_UNORM,  VK_FORMAT_R16_UNORM,  VK_FORMAT_R16_UNORM,  VK_FORMAT_R16_UNORM } },
    { AV_PIX_FMT_GBRPF32,  { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
    { AV_PIX_FMT_GBRAPF32, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
};

const VkFormat *av_vkfmt_from_pixfmt(enum AVPixelFormat p)
{
    for (enum AVPixelFormat i = 0; i < FF_ARRAY_ELEMS(vk_pixfmt_map); i++)
        if (vk_pixfmt_map[i].pixfmt == p)
            return vk_pixfmt_map[i].vkfmts;
    return NULL;
}
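
/*
 * Usage sketch (illustrative only): multi-planar sw formats map to one
 * VkFormat per plane, e.g.
 *
 *     const VkFormat *fmts = av_vkfmt_from_pixfmt(AV_PIX_FMT_NV12);
 *     // fmts[0] == VK_FORMAT_R8_UNORM   (luma plane)
 *     // fmts[1] == VK_FORMAT_R8G8_UNORM (interleaved chroma plane)
 *
 * A NULL return means the pixel format has no Vulkan mapping in the table.
 */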

static const void *vk_find_struct(const void *chain, VkStructureType stype)
{
    const VkBaseInStructure *in = chain;
    while (in) {
        if (in->sType == stype)
            return in;

        in = in->pNext;
    }

    return NULL;
}

static void vk_link_struct(void *chain, void *in)
{
    VkBaseOutStructure *out = chain;
    if (!in)
        return;

    while (out->pNext)
        out = out->pNext;

    out->pNext = in;
}
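
/*
 * These helpers implement the standard Vulkan pNext-chain idiom: every
 * extensible struct begins with sType/pNext, so any chain can be walked as
 * VkBaseInStructure and extended by appending to the final pNext pointer.
 * Illustrative use:
 *
 *     VkPhysicalDeviceProperties2 prop = { .sType = ... };
 *     vk_link_struct(&prop, &ext_props);            // append to the chain
 *     vk_find_struct(prop.pNext, ext_props.sType);  // locate it again
 */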

static int pixfmt_is_supported(AVHWDeviceContext *dev_ctx, enum AVPixelFormat p,
                               int linear)
{
    AVVulkanDeviceContext *hwctx = dev_ctx->hwctx;
    VulkanDevicePriv *priv = dev_ctx->internal->priv;
    FFVulkanFunctions *vk = &priv->vkfn;
    const VkFormat *fmt = av_vkfmt_from_pixfmt(p);
    int planes = av_pix_fmt_count_planes(p);

    if (!fmt)
        return 0;

    for (int i = 0; i < planes; i++) {
        VkFormatFeatureFlags flags;
        VkFormatProperties2 prop = {
            .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
        };
        vk->GetPhysicalDeviceFormatProperties2(hwctx->phys_dev, fmt[i], &prop);
        flags = linear ? prop.formatProperties.linearTilingFeatures :
                         prop.formatProperties.optimalTilingFeatures;
        if (!(flags & FF_VK_DEFAULT_USAGE_FLAGS))
            return 0;
    }

    return 1;
}

static int load_libvulkan(AVHWDeviceContext *ctx)
{
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;

    static const char *lib_names[] = {
#if defined(_WIN32)
        "vulkan-1.dll",
#elif defined(__APPLE__)
        "libvulkan.dylib",
        "libvulkan.1.dylib",
        "libMoltenVK.dylib",
#else
        "libvulkan.so.1",
        "libvulkan.so",
#endif
    };

    for (int i = 0; i < FF_ARRAY_ELEMS(lib_names); i++) {
        p->libvulkan = dlopen(lib_names[i], RTLD_NOW | RTLD_LOCAL);
        if (p->libvulkan)
            break;
    }

    if (!p->libvulkan) {
        av_log(ctx, AV_LOG_ERROR, "Unable to open the libvulkan library!\n");
        return AVERROR_UNKNOWN;
    }

    hwctx->get_proc_addr = (PFN_vkGetInstanceProcAddr)dlsym(p->libvulkan, "vkGetInstanceProcAddr");

    return 0;
}
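
/*
 * Note: only vkGetInstanceProcAddr is resolved via dlsym() above; every
 * other Vulkan entry point this file uses is fetched through it by
 * ff_vk_load_functions() and stored in VulkanDevicePriv.vkfn, which is why
 * VK_NO_PROTOTYPES is defined at the top of this file.
 */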

typedef struct VulkanOptExtension {
    const char *name;
    FFVulkanExtensions flag;
} VulkanOptExtension;

static const VulkanOptExtension optional_instance_exts[] = {
    /* For future use */
};

static const VulkanOptExtension optional_device_exts[] = {
    /* Misc or required by other extensions */
    { VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME,           FF_VK_EXT_NO_FLAG },
    { VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,  FF_VK_EXT_NO_FLAG },
    { VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME,         FF_VK_EXT_NO_FLAG },

    /* Imports/exports */
    { VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,        FF_VK_EXT_EXTERNAL_FD_MEMORY },
    { VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,   FF_VK_EXT_EXTERNAL_DMABUF_MEMORY },
    { VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME, FF_VK_EXT_DRM_MODIFIER_FLAGS },
    { VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,     FF_VK_EXT_EXTERNAL_FD_SEM },
    { VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME,      FF_VK_EXT_EXTERNAL_HOST_MEMORY },
#ifdef _WIN32
    { VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME,     FF_VK_EXT_EXTERNAL_WIN32_MEMORY },
    { VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME,  FF_VK_EXT_EXTERNAL_WIN32_SEM },
#endif

    /* Video encoding/decoding */
    { VK_KHR_VIDEO_QUEUE_EXTENSION_NAME,               FF_VK_EXT_NO_FLAG },
    { VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME,        FF_VK_EXT_NO_FLAG },
    { VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME,        FF_VK_EXT_NO_FLAG },
    { VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME,         FF_VK_EXT_NO_FLAG },
    { VK_EXT_VIDEO_DECODE_H264_EXTENSION_NAME,         FF_VK_EXT_NO_FLAG },
    { VK_EXT_VIDEO_DECODE_H265_EXTENSION_NAME,         FF_VK_EXT_NO_FLAG },
};
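
/*
 * Note: the VK_KHR_video_* / VK_EXT_video_* names above are provisional
 * (beta) extensions, which is why VK_ENABLE_BETA_EXTENSIONS is defined
 * before the Vulkan headers are pulled in at the top of this file.
 */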

/* Converts return values to strings */
static const char *vk_ret2str(VkResult res)
{
#define CASE(VAL) case VAL: return #VAL
    switch (res) {
    CASE(VK_SUCCESS);
    CASE(VK_NOT_READY);
    CASE(VK_TIMEOUT);
    CASE(VK_EVENT_SET);
    CASE(VK_EVENT_RESET);
    CASE(VK_INCOMPLETE);
    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_NOT_PERMITTED_EXT);
    CASE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
    CASE(VK_ERROR_INVALID_DEVICE_ADDRESS_EXT);
    CASE(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
    default: return "Unknown error";
    }
#undef CASE
}

static VkBool32 vk_dbg_callback(VkDebugUtilsMessageSeverityFlagBitsEXT severity,
                                VkDebugUtilsMessageTypeFlagsEXT messageType,
                                const VkDebugUtilsMessengerCallbackDataEXT *data,
                                void *priv)
{
    int l;
    AVHWDeviceContext *ctx = priv;

    switch (severity) {
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: l = AV_LOG_VERBOSE; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:    l = AV_LOG_INFO;    break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: l = AV_LOG_WARNING; break;
    case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:   l = AV_LOG_ERROR;   break;
    default:                                              l = AV_LOG_DEBUG;   break;
    }

    av_log(ctx, l, "%s\n", data->pMessage);
    for (int i = 0; i < data->cmdBufLabelCount; i++)
        av_log(ctx, l, "\t%i: %s\n", i, data->pCmdBufLabels[i].pLabelName);

    return 0;
}

static int check_extensions(AVHWDeviceContext *ctx, int dev, AVDictionary *opts,
                            const char * const **dst, uint32_t *num, int debug)
{
    const char *tstr;
    const char **extension_names = NULL;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int err = 0, found, extensions_found = 0;

    const char *mod;
    int optional_exts_num;
    uint32_t sup_ext_count;
    char *user_exts_str = NULL;
    AVDictionaryEntry *user_exts;
    VkExtensionProperties *sup_ext;
    const VulkanOptExtension *optional_exts;

    if (!dev) {
        mod = "instance";
        optional_exts = optional_instance_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_instance_exts);
        user_exts = av_dict_get(opts, "instance_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vk->EnumerateInstanceExtensionProperties(NULL, &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext)
            return AVERROR(ENOMEM);
        vk->EnumerateInstanceExtensionProperties(NULL, &sup_ext_count, sup_ext);
    } else {
        mod = "device";
        optional_exts = optional_device_exts;
        optional_exts_num = FF_ARRAY_ELEMS(optional_device_exts);
        user_exts = av_dict_get(opts, "device_extensions", NULL, 0);
        if (user_exts) {
            user_exts_str = av_strdup(user_exts->value);
            if (!user_exts_str) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
        }
        vk->EnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                               &sup_ext_count, NULL);
        sup_ext = av_malloc_array(sup_ext_count, sizeof(VkExtensionProperties));
        if (!sup_ext)
            return AVERROR(ENOMEM);
        vk->EnumerateDeviceExtensionProperties(hwctx->phys_dev, NULL,
                                               &sup_ext_count, sup_ext);
    }

    for (int i = 0; i < optional_exts_num; i++) {
        tstr = optional_exts[i].name;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (!found)
            continue;

        av_log(ctx, AV_LOG_VERBOSE, "Using %s extension %s\n", mod, tstr);
        p->extensions |= optional_exts[i].flag;
        ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
    }

    if (debug && !dev) {
        tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
        found = 0;
        for (int j = 0; j < sup_ext_count; j++) {
            if (!strcmp(tstr, sup_ext[j].extensionName)) {
                found = 1;
                break;
            }
        }
        if (found) {
            av_log(ctx, AV_LOG_VERBOSE, "Using %s extension %s\n", mod, tstr);
            ADD_VAL_TO_LIST(extension_names, extensions_found, tstr);
            p->extensions |= FF_VK_EXT_DEBUG_UTILS;
        } else {
            av_log(ctx, AV_LOG_ERROR, "Debug extension \"%s\" not found!\n",
                   tstr);
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (user_exts_str) {
        char *save, *token = av_strtok(user_exts_str, "+", &save);
        while (token) {
            found = 0;
            for (int j = 0; j < sup_ext_count; j++) {
                if (!strcmp(token, sup_ext[j].extensionName)) {
                    found = 1;
                    break;
                }
            }
            if (found) {
                av_log(ctx, AV_LOG_VERBOSE, "Using %s extension \"%s\"\n", mod, token);
                ADD_VAL_TO_LIST(extension_names, extensions_found, token);
            } else {
                av_log(ctx, AV_LOG_WARNING, "%s extension \"%s\" not found, excluding.\n",
                       mod, token);
            }
            token = av_strtok(NULL, "+", &save);
        }
    }

    *dst = extension_names;
    *num = extensions_found;

    av_free(user_exts_str);
    av_free(sup_ext);
    return 0;

fail:
    RELEASE_PROPS(extension_names, extensions_found);
    av_free(user_exts_str);
    av_free(sup_ext);
    return err;
}
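
/*
 * Illustrative only: callers can request additional extensions through the
 * options dictionary, as a "+"-separated list matching the av_strtok()
 * parsing above, e.g.:
 *
 *     av_dict_set(&opts, "instance_extensions", "VK_KHR_surface", 0);
 *     av_dict_set(&opts, "device_extensions",
 *                 "VK_KHR_external_memory_fd+VK_EXT_external_memory_dma_buf", 0);
 */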

static int check_validation_layers(AVHWDeviceContext *ctx, AVDictionary *opts,
                                   const char * const **dst, uint32_t *num,
                                   int *debug_mode)
{
    static const char default_layer[] = { "VK_LAYER_KHRONOS_validation" };

    int found = 0, err = 0;
    VulkanDevicePriv *priv = ctx->internal->priv;
    FFVulkanFunctions *vk = &priv->vkfn;

    uint32_t sup_layer_count;
    VkLayerProperties *sup_layers;

    AVDictionaryEntry *user_layers;
    char *user_layers_str = NULL;
    char *save, *token;

    const char **enabled_layers = NULL;
    uint32_t enabled_layers_count = 0;

    AVDictionaryEntry *debug_opt = av_dict_get(opts, "debug", NULL, 0);
    int debug = debug_opt && strtol(debug_opt->value, NULL, 10);

    /* If `debug=0`, enable no layers at all. */
    if (debug_opt && !debug)
        return 0;

    vk->EnumerateInstanceLayerProperties(&sup_layer_count, NULL);
    sup_layers = av_malloc_array(sup_layer_count, sizeof(VkLayerProperties));
    if (!sup_layers)
        return AVERROR(ENOMEM);
    vk->EnumerateInstanceLayerProperties(&sup_layer_count, sup_layers);

    av_log(ctx, AV_LOG_VERBOSE, "Supported validation layers:\n");
    for (int i = 0; i < sup_layer_count; i++)
        av_log(ctx, AV_LOG_VERBOSE, "\t%s\n", sup_layers[i].layerName);

    /* If `debug=1` is specified, enable the standard validation layer */
    if (debug) {
        *debug_mode = debug;
        for (int i = 0; i < sup_layer_count; i++) {
            if (!strcmp(default_layer, sup_layers[i].layerName)) {
                found = 1;
                av_log(ctx, AV_LOG_VERBOSE, "Default validation layer %s is enabled\n",
                       default_layer);
                ADD_VAL_TO_LIST(enabled_layers, enabled_layers_count, default_layer);
                break;
            }
        }
    }

    user_layers = av_dict_get(opts, "validation_layers", NULL, 0);
    if (!user_layers)
        goto end;

    user_layers_str = av_strdup(user_layers->value);
    if (!user_layers_str) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    token = av_strtok(user_layers_str, "+", &save);
    while (token) {
        found = 0;
        if (!strcmp(default_layer, token)) {
            if (debug) {
                /* If `debug=1`, the default layer is already enabled, skip it here */
                token = av_strtok(NULL, "+", &save);
                continue;
            } else {
                /* If `debug=0`, enable debug mode to load its callback properly */
                *debug_mode = debug;
            }
        }
        for (int j = 0; j < sup_layer_count; j++) {
            if (!strcmp(token, sup_layers[j].layerName)) {
                found = 1;
                break;
            }
        }
        if (found) {
            av_log(ctx, AV_LOG_VERBOSE, "Requested Validation Layer: %s\n", token);
            ADD_VAL_TO_LIST(enabled_layers, enabled_layers_count, token);
        } else {
            av_log(ctx, AV_LOG_ERROR,
                   "Validation Layer \"%s\" not supported.\n", token);
            err = AVERROR(EINVAL);
            goto fail;
        }
        token = av_strtok(NULL, "+", &save);
    }

    av_free(user_layers_str);

end:
    av_free(sup_layers);

    *dst = enabled_layers;
    *num = enabled_layers_count;

    return 0;

fail:
    RELEASE_PROPS(enabled_layers, enabled_layers_count);
    av_free(sup_layers);
    av_free(user_layers_str);
    return err;
}
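
/*
 * Illustrative only: validation is driven by the same options dictionary,
 * e.g. "debug=1" enables VK_LAYER_KHRONOS_validation plus the debug-utils
 * messenger set up in create_instance() below, while "validation_layers"
 * names explicit layers, again as a "+"-separated list:
 *
 *     av_dict_set(&opts, "debug", "1", 0);
 *     av_dict_set(&opts, "validation_layers", "VK_LAYER_KHRONOS_validation", 0);
 */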

/* Creates a VkInstance */
static int create_instance(AVHWDeviceContext *ctx, AVDictionary *opts)
{
    int err = 0, debug_mode = 0;
    VkResult ret;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VkApplicationInfo application_info = {
        .sType         = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pEngineName   = "libavutil",
        .apiVersion    = VK_API_VERSION_1_2,
        .engineVersion = VK_MAKE_VERSION(LIBAVUTIL_VERSION_MAJOR,
                                         LIBAVUTIL_VERSION_MINOR,
                                         LIBAVUTIL_VERSION_MICRO),
    };
    VkInstanceCreateInfo inst_props = {
        .sType            = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pApplicationInfo = &application_info,
    };

    if (!hwctx->get_proc_addr) {
        err = load_libvulkan(ctx);
        if (err < 0)
            return err;
    }

    err = ff_vk_load_functions(ctx, vk, p->extensions, 0, 0);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Unable to load instance enumeration functions!\n");
        return err;
    }

    err = check_validation_layers(ctx, opts, &inst_props.ppEnabledLayerNames,
                                  &inst_props.enabledLayerCount, &debug_mode);
    if (err)
        goto fail;

    /* Check for present/missing extensions */
    err = check_extensions(ctx, 0, opts, &inst_props.ppEnabledExtensionNames,
                           &inst_props.enabledExtensionCount, debug_mode);
    hwctx->enabled_inst_extensions = inst_props.ppEnabledExtensionNames;
    hwctx->nb_enabled_inst_extensions = inst_props.enabledExtensionCount;
    if (err < 0)
        goto fail;

    /* Try to create the instance */
    ret = vk->CreateInstance(&inst_props, hwctx->alloc, &hwctx->inst);

    /* Check for errors */
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Instance creation failure: %s\n",
               vk_ret2str(ret));
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    err = ff_vk_load_functions(ctx, vk, p->extensions, 1, 0);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Unable to load instance functions!\n");
        goto fail;
    }

    if (debug_mode) {
        VkDebugUtilsMessengerCreateInfoEXT dbg = {
            .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                               VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
            .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                           VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                           VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
            .pfnUserCallback = vk_dbg_callback,
            .pUserData = ctx,
        };

        vk->CreateDebugUtilsMessengerEXT(hwctx->inst, &dbg,
                                         hwctx->alloc, &p->debug_ctx);
    }

    err = 0;

fail:
    RELEASE_PROPS(inst_props.ppEnabledLayerNames, inst_props.enabledLayerCount);
    return err;
}

typedef struct VulkanDeviceSelection {
    uint8_t uuid[VK_UUID_SIZE]; /* Will use this first unless !has_uuid */
    int has_uuid;
    const char *name; /* Will use this second unless NULL */
    uint32_t pci_device; /* Will use this third unless 0x0 */
    uint32_t vendor_id; /* Last resort to find something deterministic */
    int index; /* Finally fall back to index */
} VulkanDeviceSelection;

static const char *vk_dev_type(enum VkPhysicalDeviceType type)
{
    switch (type) {
    case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU: return "integrated";
    case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:   return "discrete";
    case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:    return "virtual";
    case VK_PHYSICAL_DEVICE_TYPE_CPU:            return "software";
    default:                                     return "unknown";
    }
}

/* Finds a device */
static int find_device(AVHWDeviceContext *ctx, VulkanDeviceSelection *select)
{
    int err = 0, choice = -1;
    uint32_t num;
    VkResult ret;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    VkPhysicalDevice *devices = NULL;
    VkPhysicalDeviceIDProperties *idp = NULL;
    VkPhysicalDeviceProperties2 *prop = NULL;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    ret = vk->EnumeratePhysicalDevices(hwctx->inst, &num, NULL);
    if (ret != VK_SUCCESS || !num) {
        av_log(ctx, AV_LOG_ERROR, "No devices found: %s!\n", vk_ret2str(ret));
        return AVERROR(ENODEV);
    }

    devices = av_malloc_array(num, sizeof(VkPhysicalDevice));
    if (!devices)
        return AVERROR(ENOMEM);

    ret = vk->EnumeratePhysicalDevices(hwctx->inst, &num, devices);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed enumerating devices: %s\n",
               vk_ret2str(ret));
        err = AVERROR(ENODEV);
        goto end;
    }

    prop = av_calloc(num, sizeof(*prop));
    if (!prop) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    idp = av_calloc(num, sizeof(*idp));
    if (!idp) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    av_log(ctx, AV_LOG_VERBOSE, "GPU listing:\n");
    for (int i = 0; i < num; i++) {
        idp[i].sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
        prop[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
        prop[i].pNext = &idp[i];

        vk->GetPhysicalDeviceProperties2(devices[i], &prop[i]);
        av_log(ctx, AV_LOG_VERBOSE, "    %d: %s (%s) (0x%x)\n", i,
               prop[i].properties.deviceName,
               vk_dev_type(prop[i].properties.deviceType),
               prop[i].properties.deviceID);
    }

    if (select->has_uuid) {
        for (int i = 0; i < num; i++) {
            if (!strncmp(idp[i].deviceUUID, select->uuid, VK_UUID_SIZE)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device by given UUID!\n");
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->name) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: %s\n", select->name);
        for (int i = 0; i < num; i++) {
            if (strstr(prop[i].properties.deviceName, select->name)) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device \"%s\"!\n",
               select->name);
        err = AVERROR(ENODEV);
        goto end;
    } else if (select->pci_device) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested device: 0x%x\n", select->pci_device);
        for (int i = 0; i < num; i++) {
            if (select->pci_device == prop[i].properties.deviceID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with PCI ID 0x%x!\n",
               select->pci_device);
        err = AVERROR(EINVAL);
        goto end;
    } else if (select->vendor_id) {
        av_log(ctx, AV_LOG_VERBOSE, "Requested vendor: 0x%x\n", select->vendor_id);
        for (int i = 0; i < num; i++) {
            if (select->vendor_id == prop[i].properties.vendorID) {
                choice = i;
                goto end;
            }
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with Vendor ID 0x%x!\n",
               select->vendor_id);
        err = AVERROR(ENODEV);
        goto end;
    } else {
        if (select->index < num) {
            choice = select->index;
            goto end;
        }
        av_log(ctx, AV_LOG_ERROR, "Unable to find device with index %i!\n",
               select->index);
        err = AVERROR(ENODEV);
        goto end;
    }

end:
    if (choice > -1) {
        av_log(ctx, AV_LOG_VERBOSE, "Device %d selected: %s (%s) (0x%x)\n",
               choice, prop[choice].properties.deviceName,
               vk_dev_type(prop[choice].properties.deviceType),
               prop[choice].properties.deviceID);
        hwctx->phys_dev = devices[choice];
    }

    av_free(devices);
    av_free(prop);
    av_free(idp);

    return err;
}

/* Picks the least used qf with the fewest unneeded flags, or -1 if none found */
static inline int pick_queue_family(VkQueueFamilyProperties *qf, uint32_t num_qf,
                                    VkQueueFlagBits flags)
{
    int index = -1;
    uint32_t min_score = UINT32_MAX;

    for (int i = 0; i < num_qf; i++) {
        const VkQueueFlagBits qflags = qf[i].queueFlags;
        if (qflags & flags) {
            uint32_t score = av_popcount(qflags) + qf[i].timestampValidBits;
            if (score < min_score) {
                index = i;
                min_score = score;
            }
        }
    }

    if (index > -1)
        qf[index].timestampValidBits++;

    return index;
}
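
/*
 * Scoring example (illustrative): on a device where family 0 advertises
 * graphics|compute|transfer and family 1 is transfer-only, a request for
 * VK_QUEUE_TRANSFER_BIT picks family 1, since av_popcount() penalizes
 * families with extra capabilities. The timestampValidBits field is
 * repurposed below as a use counter, so repeated picks also spread across
 * equally suitable families.
 */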

static int setup_queue_families(AVHWDeviceContext *ctx, VkDeviceCreateInfo *cd)
{
    uint32_t num;
    float *weights;
    VkQueueFamilyProperties *qf = NULL;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    int graph_index, comp_index, tx_index, enc_index, dec_index;

    /* First get the number of queue families */
    vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, NULL);
    if (!num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

    /* Then allocate memory */
    qf = av_malloc_array(num, sizeof(VkQueueFamilyProperties));
    if (!qf)
        return AVERROR(ENOMEM);

    /* Finally retrieve the queue families */
    vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &num, qf);

    av_log(ctx, AV_LOG_VERBOSE, "Queue families:\n");
    for (int i = 0; i < num; i++) {
        av_log(ctx, AV_LOG_VERBOSE, "    %i:%s%s%s%s%s%s%s (queues: %i)\n", i,
               ((qf[i].queueFlags) & VK_QUEUE_GRAPHICS_BIT) ? " graphics" : "",
               ((qf[i].queueFlags) & VK_QUEUE_COMPUTE_BIT) ? " compute" : "",
               ((qf[i].queueFlags) & VK_QUEUE_TRANSFER_BIT) ? " transfer" : "",
               ((qf[i].queueFlags) & VK_QUEUE_VIDEO_ENCODE_BIT_KHR) ? " encode" : "",
               ((qf[i].queueFlags) & VK_QUEUE_VIDEO_DECODE_BIT_KHR) ? " decode" : "",
               ((qf[i].queueFlags) & VK_QUEUE_SPARSE_BINDING_BIT) ? " sparse" : "",
               ((qf[i].queueFlags) & VK_QUEUE_PROTECTED_BIT) ? " protected" : "",
               qf[i].queueCount);

        /* We use this field to keep a score of how many times we've used that
         * queue family in order to make better choices. */
        qf[i].timestampValidBits = 0;
    }

    /* Pick each queue family to use */
    graph_index = pick_queue_family(qf, num, VK_QUEUE_GRAPHICS_BIT);
    comp_index  = pick_queue_family(qf, num, VK_QUEUE_COMPUTE_BIT);
    tx_index    = pick_queue_family(qf, num, VK_QUEUE_TRANSFER_BIT);
    enc_index   = pick_queue_family(qf, num, VK_QUEUE_VIDEO_ENCODE_BIT_KHR);
    dec_index   = pick_queue_family(qf, num, VK_QUEUE_VIDEO_DECODE_BIT_KHR);

    /* Signalling the transfer capabilities on a queue family is optional */
    if (tx_index < 0) {
        tx_index = pick_queue_family(qf, num, VK_QUEUE_COMPUTE_BIT);
        if (tx_index < 0)
            tx_index = pick_queue_family(qf, num, VK_QUEUE_GRAPHICS_BIT);
    }

    hwctx->queue_family_index        = -1;
    hwctx->queue_family_comp_index   = -1;
    hwctx->queue_family_tx_index     = -1;
    hwctx->queue_family_encode_index = -1;
    hwctx->queue_family_decode_index = -1;

#define SETUP_QUEUE(qf_idx)                                                    \
    if (qf_idx > -1) {                                                         \
        int fidx = qf_idx;                                                     \
        int qc = qf[fidx].queueCount;                                          \
        VkDeviceQueueCreateInfo *pc;                                           \
                                                                               \
        if (fidx == graph_index) {                                             \
            hwctx->queue_family_index = fidx;                                  \
            hwctx->nb_graphics_queues = qc;                                    \
            graph_index = -1;                                                  \
        }                                                                      \
        if (fidx == comp_index) {                                              \
            hwctx->queue_family_comp_index = fidx;                             \
            hwctx->nb_comp_queues = qc;                                        \
            comp_index = -1;                                                   \
        }                                                                      \
        if (fidx == tx_index) {                                                \
            hwctx->queue_family_tx_index = fidx;                               \
            hwctx->nb_tx_queues = qc;                                          \
            tx_index = -1;                                                     \
        }                                                                      \
        if (fidx == enc_index) {                                               \
            hwctx->queue_family_encode_index = fidx;                           \
            hwctx->nb_encode_queues = qc;                                      \
            enc_index = -1;                                                    \
        }                                                                      \
        if (fidx == dec_index) {                                               \
            hwctx->queue_family_decode_index = fidx;                           \
            hwctx->nb_decode_queues = qc;                                      \
            dec_index = -1;                                                    \
        }                                                                      \
                                                                               \
        pc = av_realloc((void *)cd->pQueueCreateInfos,                         \
                        sizeof(*pc) * (cd->queueCreateInfoCount + 1));         \
        if (!pc) {                                                             \
            av_free(qf);                                                       \
            return AVERROR(ENOMEM);                                            \
        }                                                                      \
        cd->pQueueCreateInfos = pc;                                            \
        pc = &pc[cd->queueCreateInfoCount];                                    \
                                                                               \
        weights = av_malloc(qc * sizeof(float));                               \
        if (!weights) {                                                        \
            av_free(qf);                                                       \
            return AVERROR(ENOMEM);                                            \
        }                                                                      \
                                                                               \
        memset(pc, 0, sizeof(*pc));                                            \
        pc->sType            = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;     \
        pc->queueFamilyIndex = fidx;                                           \
        pc->queueCount       = qc;                                             \
        pc->pQueuePriorities = weights;                                        \
                                                                               \
        for (int i = 0; i < qc; i++)                                           \
            weights[i] = 1.0f / qc;                                            \
                                                                               \
        cd->queueCreateInfoCount++;                                            \
    }

    SETUP_QUEUE(graph_index)
    SETUP_QUEUE(comp_index)
    SETUP_QUEUE(tx_index)
    SETUP_QUEUE(enc_index)
    SETUP_QUEUE(dec_index)

#undef SETUP_QUEUE

    av_free(qf);

    return 0;
}
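
/*
 * Note: SETUP_QUEUE appends at most one VkDeviceQueueCreateInfo per distinct
 * family (already-consumed indices are reset to -1), requests every queue
 * the family exposes and gives each an equal priority of 1.0f / count. The
 * create infos and priority arrays must stay allocated until device
 * creation, since cd->pQueueCreateInfos keeps pointing into them.
 */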

static int create_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                           int queue_family_index, int num_queues)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    VkCommandPoolCreateInfo cqueue_create = {
        .sType            = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags            = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
        .queueFamilyIndex = queue_family_index,
    };
    VkCommandBufferAllocateInfo cbuf_create = {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = num_queues,
    };

    cmd->nb_queues = num_queues;

    /* Create command pool */
    ret = vk->CreateCommandPool(hwctx->act_dev, &cqueue_create,
                                hwctx->alloc, &cmd->pool);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Command pool creation failure: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    cmd->bufs = av_mallocz(num_queues * sizeof(*cmd->bufs));
    if (!cmd->bufs)
        return AVERROR(ENOMEM);

    cbuf_create.commandPool = cmd->pool;

    /* Allocate command buffer */
    ret = vk->AllocateCommandBuffers(hwctx->act_dev, &cbuf_create, cmd->bufs);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Command buffer alloc failure: %s\n",
               vk_ret2str(ret));
        av_freep(&cmd->bufs);
        return AVERROR_EXTERNAL;
    }

    cmd->queues = av_mallocz(num_queues * sizeof(*cmd->queues));
    if (!cmd->queues)
        return AVERROR(ENOMEM);

    for (int i = 0; i < num_queues; i++) {
        VulkanQueueCtx *q = &cmd->queues[i];
        vk->GetDeviceQueue(hwctx->act_dev, queue_family_index, i, &q->queue);
        q->was_synchronous = 1;
    }

    return 0;
}
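
/*
 * Note: every queue slot starts out as was_synchronous = 1 with no fence;
 * the fence is created lazily on first use in wait_start_exec_ctx() below
 * and is only waited upon when an asynchronous submission may still be in
 * flight on that slot.
 */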

static void free_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    if (cmd->queues) {
        for (int i = 0; i < cmd->nb_queues; i++) {
            VulkanQueueCtx *q = &cmd->queues[i];

            /* Make sure all queues have finished executing */
            if (q->fence && !q->was_synchronous) {
                vk->WaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
                vk->ResetFences(hwctx->act_dev, 1, &q->fence);
            }

            /* Free the fence */
            if (q->fence)
                vk->DestroyFence(hwctx->act_dev, q->fence, hwctx->alloc);

            /* Free buffer dependencies */
            for (int j = 0; j < q->nb_buf_deps; j++)
                av_buffer_unref(&q->buf_deps[j]);
            av_free(q->buf_deps);
        }
    }

    if (cmd->bufs)
        vk->FreeCommandBuffers(hwctx->act_dev, cmd->pool, cmd->nb_queues, cmd->bufs);
    if (cmd->pool)
        vk->DestroyCommandPool(hwctx->act_dev, cmd->pool, hwctx->alloc);

    av_freep(&cmd->queues);
    av_freep(&cmd->bufs);
    cmd->pool = NULL;
}

static VkCommandBuffer get_buf_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    return cmd->bufs[cmd->cur_queue_idx];
}

static void unref_exec_ctx_deps(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    for (int j = 0; j < q->nb_buf_deps; j++)
        av_buffer_unref(&q->buf_deps[j]);
    q->nb_buf_deps = 0;
}

static int wait_start_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };

    /* Create the fence and don't wait for it initially */
    if (!q->fence) {
        VkFenceCreateInfo fence_spawn = {
            .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        };
        ret = vk->CreateFence(hwctx->act_dev, &fence_spawn, hwctx->alloc,
                              &q->fence);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to queue frame fence: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    } else if (!q->was_synchronous) {
        vk->WaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
        vk->ResetFences(hwctx->act_dev, 1, &q->fence);
    }

    /* Discard queue dependencies */
    unref_exec_ctx_deps(hwfc, cmd);

    ret = vk->BeginCommandBuffer(cmd->bufs[cmd->cur_queue_idx], &cmd_start);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to init command buffer: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}

static int add_buf_dep_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
                                AVBufferRef * const *deps, int nb_deps)
{
    AVBufferRef **dst;
    VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];

    if (!deps || !nb_deps)
        return 0;

    dst = av_fast_realloc(q->buf_deps, &q->buf_deps_alloc_size,
                          (q->nb_buf_deps + nb_deps) * sizeof(*dst));
    if (!dst)
        goto err;

    q->buf_deps = dst;

    for (int i = 0; i < nb_deps; i++) {
        q->buf_deps[q->nb_buf_deps] = av_buffer_ref(deps[i]);
        if (!q->buf_deps[q->nb_buf_deps])
            goto err;
        q->nb_buf_deps++;
    }

    return 0;

err:
    unref_exec_ctx_deps(hwfc, cmd);
    return AVERROR(ENOMEM);
}
2020-05-15 01:01:08 +02:00
|
|
|
static int submit_exec_ctx(AVHWFramesContext *hwfc, VulkanExecCtx *cmd,
|
2021-11-04 13:17:06 +02:00
|
|
|
VkSubmitInfo *s_info, AVVkFrame *f, int synchronous)
|
2020-05-14 01:28:00 +02:00
|
|
|
{
|
|
|
|
VkResult ret;
|
|
|
|
VulkanQueueCtx *q = &cmd->queues[cmd->cur_queue_idx];
|
2021-04-29 02:44:41 +02:00
|
|
|
VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
|
2021-11-07 16:57:35 +02:00
|
|
|
FFVulkanFunctions *vk = &p->vkfn;
|
2020-05-14 01:28:00 +02:00
|
|
|
|
2021-04-29 02:44:41 +02:00
|
|
|
ret = vk->EndCommandBuffer(cmd->bufs[cmd->cur_queue_idx]);
|
2020-05-14 01:28:00 +02:00
|
|
|
if (ret != VK_SUCCESS) {
|
2020-05-15 01:01:08 +02:00
|
|
|
av_log(hwfc, AV_LOG_ERROR, "Unable to finish command buffer: %s\n",
|
2020-05-14 01:28:00 +02:00
|
|
|
vk_ret2str(ret));
|
2020-05-15 01:01:08 +02:00
|
|
|
unref_exec_ctx_deps(hwfc, cmd);
|
2020-05-14 01:28:00 +02:00
|
|
|
return AVERROR_EXTERNAL;
|
|
|
|
}
|
|
|
|
|
|
|
|
s_info->pCommandBuffers = &cmd->bufs[cmd->cur_queue_idx];
|
|
|
|
s_info->commandBufferCount = 1;
|
|
|
|
|
2021-04-29 02:44:41 +02:00
|
|
|
ret = vk->QueueSubmit(q->queue, 1, s_info, q->fence);
|
2020-05-14 01:28:00 +02:00
|
|
|
if (ret != VK_SUCCESS) {
|
2021-11-13 15:21:36 +02:00
|
|
|
av_log(hwfc, AV_LOG_ERROR, "Queue submission failure: %s\n",
|
|
|
|
vk_ret2str(ret));
|
2020-05-15 01:01:08 +02:00
|
|
|
unref_exec_ctx_deps(hwfc, cmd);
|
2020-05-14 01:28:00 +02:00
|
|
|
return AVERROR_EXTERNAL;
|
|
|
|
}
|
|
|
|
|
2021-11-04 13:17:06 +02:00
|
|
|
if (f)
|
|
|
|
for (int i = 0; i < s_info->signalSemaphoreCount; i++)
|
|
|
|
f->sem_value[i]++;
|
|
|
|
|
2020-05-14 01:28:00 +02:00
|
|
|
q->was_synchronous = synchronous;
|
|
|
|
|
|
|
|
if (synchronous) {
|
2020-05-15 01:01:08 +02:00
|
|
|
AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
|
2021-04-29 02:44:41 +02:00
|
|
|
vk->WaitForFences(hwctx->act_dev, 1, &q->fence, VK_TRUE, UINT64_MAX);
|
|
|
|
vk->ResetFences(hwctx->act_dev, 1, &q->fence);
|
2020-05-15 01:01:08 +02:00
|
|
|
unref_exec_ctx_deps(hwfc, cmd);
|
2020-05-14 01:28:00 +02:00
|
|
|
} else { /* Rotate queues */
|
|
|
|
cmd->cur_queue_idx = (cmd->cur_queue_idx + 1) % cmd->nb_queues;
|
|
|
|
}
|
|
|
|
|
|
|
|
return 0;
|
2019-08-28 22:58:10 +02:00
|
|
|
}
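
/*
 * Device teardown mirrors creation in reverse: the logical device first,
 * then the debug messenger and the instance, with libvulkan unloaded last
 * so every function pointer in p->vkfn remains valid while still needed.
 */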

static void vulkan_device_free(AVHWDeviceContext *ctx)
{
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    if (hwctx->act_dev)
        vk->DestroyDevice(hwctx->act_dev, hwctx->alloc);

    if (p->debug_ctx)
        vk->DestroyDebugUtilsMessengerEXT(hwctx->inst, p->debug_ctx,
                                          hwctx->alloc);

    if (hwctx->inst)
        vk->DestroyInstance(hwctx->inst, hwctx->alloc);

    if (p->libvulkan)
        dlclose(p->libvulkan);

    RELEASE_PROPS(hwctx->enabled_inst_extensions, hwctx->nb_enabled_inst_extensions);
    RELEASE_PROPS(hwctx->enabled_dev_extensions, hwctx->nb_enabled_dev_extensions);
}
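
/*
 * Two pNext chains of feature structs are built below: a query chain
 * (dev_features -> 1.1 -> 1.2 -> timeline) to read what the physical
 * device supports, and a request chain rooted at hwctx->device_features
 * that is handed to vkCreateDevice:
 *
 *     dev_info.pNext -> hwctx->device_features
 *                    -> p->device_features_1_1
 *                    -> p->device_features_1_2
 */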

static int vulkan_device_create_internal(AVHWDeviceContext *ctx,
                                         VulkanDeviceSelection *dev_select,
                                         AVDictionary *opts, int flags)
{
    int err = 0;
    VkResult ret;
    AVDictionaryEntry *opt_d;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    /*
     * VkPhysicalDeviceVulkan12Features has a timelineSemaphore field, but
     * MoltenVK doesn't implement VkPhysicalDeviceVulkan12Features yet, so we
     * use VkPhysicalDeviceTimelineSemaphoreFeatures directly.
     */
    VkPhysicalDeviceTimelineSemaphoreFeatures timeline_features = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
    };
    VkPhysicalDeviceVulkan12Features dev_features_1_2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
        .pNext = &timeline_features,
    };
    VkPhysicalDeviceVulkan11Features dev_features_1_1 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
        .pNext = &dev_features_1_2,
    };
    VkPhysicalDeviceFeatures2 dev_features = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &dev_features_1_1,
    };

    VkDeviceCreateInfo dev_info = {
        .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
        .pNext = &hwctx->device_features,
    };

    hwctx->device_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    hwctx->device_features.pNext = &p->device_features_1_1;
    p->device_features_1_1.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
    p->device_features_1_1.pNext = &p->device_features_1_2;
    p->device_features_1_2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;

    ctx->free = vulkan_device_free;

    /* Create an instance if not given one */
    if ((err = create_instance(ctx, opts)))
        goto end;

    /* Find a device (if not given one) */
    if ((err = find_device(ctx, dev_select)))
        goto end;

    vk->GetPhysicalDeviceFeatures2(hwctx->phys_dev, &dev_features);

    /* Try to keep in sync with libplacebo */
#define COPY_FEATURE(DST, NAME) (DST).features.NAME = dev_features.features.NAME;
    COPY_FEATURE(hwctx->device_features, shaderImageGatherExtended)
    COPY_FEATURE(hwctx->device_features, shaderStorageImageReadWithoutFormat)
    COPY_FEATURE(hwctx->device_features, shaderStorageImageWriteWithoutFormat)
    COPY_FEATURE(hwctx->device_features, fragmentStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, vertexPipelineStoresAndAtomics)
    COPY_FEATURE(hwctx->device_features, shaderInt64)
#undef COPY_FEATURE

    /* We require timeline semaphores */
    if (!timeline_features.timelineSemaphore) {
        av_log(ctx, AV_LOG_ERROR, "Device does not support timeline semaphores!\n");
        err = AVERROR(ENOSYS);
        goto end;
    }
    p->device_features_1_2.timelineSemaphore = 1;

    /* Setup queue family */
    if ((err = setup_queue_families(ctx, &dev_info)))
        goto end;

    if ((err = check_extensions(ctx, 1, opts, &dev_info.ppEnabledExtensionNames,
                                &dev_info.enabledExtensionCount, 0))) {
        for (int i = 0; i < dev_info.queueCreateInfoCount; i++)
            av_free((void *)dev_info.pQueueCreateInfos[i].pQueuePriorities);
        av_free((void *)dev_info.pQueueCreateInfos);
        goto end;
    }

    ret = vk->CreateDevice(hwctx->phys_dev, &dev_info, hwctx->alloc,
                           &hwctx->act_dev);

    for (int i = 0; i < dev_info.queueCreateInfoCount; i++)
        av_free((void *)dev_info.pQueueCreateInfos[i].pQueuePriorities);
    av_free((void *)dev_info.pQueueCreateInfos);

    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Device creation failure: %s\n",
               vk_ret2str(ret));
        for (int i = 0; i < dev_info.enabledExtensionCount; i++)
            av_free((void *)dev_info.ppEnabledExtensionNames[i]);
        av_free((void *)dev_info.ppEnabledExtensionNames);
        err = AVERROR_EXTERNAL;
        goto end;
    }

    /* Tiled images setting, use them by default */
    opt_d = av_dict_get(opts, "linear_images", NULL, 0);
    if (opt_d)
        p->use_linear_images = strtol(opt_d->value, NULL, 10);

    opt_d = av_dict_get(opts, "contiguous_planes", NULL, 0);
    if (opt_d)
        p->contiguous_planes = strtol(opt_d->value, NULL, 10);
    else
        p->contiguous_planes = -1;

    hwctx->enabled_dev_extensions = dev_info.ppEnabledExtensionNames;
    hwctx->nb_enabled_dev_extensions = dev_info.enabledExtensionCount;

end:
    return err;
}
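
/*
 * The CHECK_QUEUE macro below validates each user-supplied queue family
 * index and logs its roles. An index is knocked out (set to -1) once
 * reported, so a family serving several roles, e.g. a combined
 * graphics+compute+transfer family 0, is printed only once.
 */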

static int vulkan_device_init(AVHWDeviceContext *ctx)
{
    int err;
    uint32_t queue_num;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    int graph_index, comp_index, tx_index, enc_index, dec_index;

    /* Set device extension flags */
    for (int i = 0; i < hwctx->nb_enabled_dev_extensions; i++) {
        for (int j = 0; j < FF_ARRAY_ELEMS(optional_device_exts); j++) {
            if (!strcmp(hwctx->enabled_dev_extensions[i],
                        optional_device_exts[j].name)) {
                p->extensions |= optional_device_exts[j].flag;
                break;
            }
        }
    }

    err = ff_vk_load_functions(ctx, vk, p->extensions, 1, 1);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Unable to load functions!\n");
        return err;
    }

    p->props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    p->props.pNext = &p->hprops;
    p->hprops.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;

    vk->GetPhysicalDeviceProperties2(hwctx->phys_dev, &p->props);
    av_log(ctx, AV_LOG_VERBOSE, "Using device: %s\n",
           p->props.properties.deviceName);
    av_log(ctx, AV_LOG_VERBOSE, "Alignments:\n");
    av_log(ctx, AV_LOG_VERBOSE, "    optimalBufferCopyRowPitchAlignment: %"PRIu64"\n",
           p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
    av_log(ctx, AV_LOG_VERBOSE, "    minMemoryMapAlignment:              %"SIZE_SPECIFIER"\n",
           p->props.properties.limits.minMemoryMapAlignment);
    if (p->extensions & FF_VK_EXT_EXTERNAL_HOST_MEMORY)
        av_log(ctx, AV_LOG_VERBOSE, "    minImportedHostPointerAlignment:    %"PRIu64"\n",
               p->hprops.minImportedHostPointerAlignment);

    p->dev_is_nvidia = (p->props.properties.vendorID == 0x10de);
    p->dev_is_intel  = (p->props.properties.vendorID == 0x8086);

    vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->phys_dev, &queue_num, NULL);
    if (!queue_num) {
        av_log(ctx, AV_LOG_ERROR, "Failed to get queues!\n");
        return AVERROR_EXTERNAL;
    }

    graph_index = hwctx->queue_family_index;
    comp_index  = hwctx->queue_family_comp_index;
    tx_index    = hwctx->queue_family_tx_index;
    enc_index   = hwctx->queue_family_encode_index;
    dec_index   = hwctx->queue_family_decode_index;

#define CHECK_QUEUE(type, required, fidx, ctx_qf, qc)                                           \
    do {                                                                                        \
        if (ctx_qf < 0 && required) {                                                           \
            av_log(ctx, AV_LOG_ERROR, "%s queue family is required, but marked as missing"      \
                   " in the context!\n", type);                                                 \
            return AVERROR(EINVAL);                                                             \
        } else if (fidx < 0 || ctx_qf < 0) {                                                    \
            break;                                                                              \
        } else if (ctx_qf >= queue_num) {                                                       \
            av_log(ctx, AV_LOG_ERROR, "Invalid %s family index %i (device has %i families)!\n", \
                   type, ctx_qf, queue_num);                                                    \
            return AVERROR(EINVAL);                                                             \
        }                                                                                       \
                                                                                                \
        av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i (queues: %i)"                        \
                                    " for%s%s%s%s%s\n",                                         \
               ctx_qf, qc,                                                                      \
               ctx_qf == graph_index ? " graphics" : "",                                        \
               ctx_qf == comp_index  ? " compute" : "",                                         \
               ctx_qf == tx_index    ? " transfers" : "",                                       \
               ctx_qf == enc_index   ? " encode" : "",                                          \
               ctx_qf == dec_index   ? " decode" : "");                                         \
        graph_index = (ctx_qf == graph_index) ? -1 : graph_index;                               \
        comp_index  = (ctx_qf == comp_index)  ? -1 : comp_index;                                \
        tx_index    = (ctx_qf == tx_index)    ? -1 : tx_index;                                  \
        enc_index   = (ctx_qf == enc_index)   ? -1 : enc_index;                                 \
        dec_index   = (ctx_qf == dec_index)   ? -1 : dec_index;                                 \
        p->qfs[p->num_qfs++] = ctx_qf;                                                          \
    } while (0)

    CHECK_QUEUE("graphics", 0, graph_index, hwctx->queue_family_index,        hwctx->nb_graphics_queues);
    CHECK_QUEUE("upload",   1, tx_index,    hwctx->queue_family_tx_index,     hwctx->nb_tx_queues);
    CHECK_QUEUE("compute",  1, comp_index,  hwctx->queue_family_comp_index,   hwctx->nb_comp_queues);
    CHECK_QUEUE("encode",   0, enc_index,   hwctx->queue_family_encode_index, hwctx->nb_encode_queues);
    CHECK_QUEUE("decode",   0, dec_index,   hwctx->queue_family_decode_index, hwctx->nb_decode_queues);

#undef CHECK_QUEUE

    /* Get device capabilities */
    vk->GetPhysicalDeviceMemoryProperties(hwctx->phys_dev, &p->mprops);

    return 0;
}
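
/*
 * The device string selects by index when it parses as a number and by
 * name otherwise. An illustrative caller, using the public libavutil API
 * (error handling omitted):
 *
 *     AVBufferRef *dev_ref = NULL;
 *     av_hwdevice_ctx_create(&dev_ref, AV_HWDEVICE_TYPE_VULKAN, "0", NULL, 0);
 */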

static int vulkan_device_create(AVHWDeviceContext *ctx, const char *device,
                                AVDictionary *opts, int flags)
{
    VulkanDeviceSelection dev_select = { 0 };
    if (device && device[0]) {
        char *end = NULL;
        dev_select.index = strtol(device, &end, 10);
        if (end == device) {
            dev_select.index = 0;
            dev_select.name  = device;
        }
    }

    return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
}

static int vulkan_device_derive(AVHWDeviceContext *ctx,
                                AVHWDeviceContext *src_ctx,
                                AVDictionary *opts, int flags)
{
    av_unused VulkanDeviceSelection dev_select = { 0 };

    /* If there's only one device on the system, then even if it's not covered
     * by the following checks (e.g. non-PCIe ARM GPU), having an empty
     * dev_select will mean it'll get picked. */
    switch (src_ctx->type) {
#if CONFIG_LIBDRM
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI: {
        AVVAAPIDeviceContext *src_hwctx = src_ctx->hwctx;

        const char *vendor = vaQueryVendorString(src_hwctx->display);
        if (!vendor) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from VAAPI!\n");
            return AVERROR_EXTERNAL;
        }

        if (strstr(vendor, "Intel"))
            dev_select.vendor_id = 0x8086;
        if (strstr(vendor, "AMD"))
            dev_select.vendor_id = 0x1002;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
#endif
    case AV_HWDEVICE_TYPE_DRM: {
        AVDRMDeviceContext *src_hwctx = src_ctx->hwctx;

        drmDevice *drm_dev_info;
        int err = drmGetDevice(src_hwctx->fd, &drm_dev_info);
        if (err) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get device info from DRM fd!\n");
            return AVERROR_EXTERNAL;
        }

        if (drm_dev_info->bustype == DRM_BUS_PCI)
            dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;

        drmFreeDevice(&drm_dev_info);

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
#endif
#if CONFIG_CUDA
    case AV_HWDEVICE_TYPE_CUDA: {
        AVHWDeviceContext *cuda_cu = src_ctx;
        AVCUDADeviceContext *src_hwctx = src_ctx->hwctx;
        AVCUDADeviceContextInternal *cu_internal = src_hwctx->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        int ret = CHECK_CU(cu->cuDeviceGetUuid((CUuuid *)&dev_select.uuid,
                                               cu_internal->cuda_device));
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unable to get UUID from CUDA!\n");
            return AVERROR_EXTERNAL;
        }

        dev_select.has_uuid = 1;

        return vulkan_device_create_internal(ctx, &dev_select, opts, flags);
    }
#endif
    default:
        return AVERROR(ENOSYS);
    }
}

static int vulkan_frames_get_constraints(AVHWDeviceContext *ctx,
                                         const void *hwconfig,
                                         AVHWFramesConstraints *constraints)
{
    int count = 0;
    VulkanDevicePriv *p = ctx->internal->priv;

    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        count += pixfmt_is_supported(ctx, i, p->use_linear_images);

#if CONFIG_CUDA
    if (p->dev_is_nvidia)
        count++;
#endif

    constraints->valid_sw_formats = av_malloc_array(count + 1,
                                                    sizeof(enum AVPixelFormat));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    count = 0;
    for (enum AVPixelFormat i = 0; i < AV_PIX_FMT_NB; i++)
        if (pixfmt_is_supported(ctx, i, p->use_linear_images))
            constraints->valid_sw_formats[count++] = i;

#if CONFIG_CUDA
    if (p->dev_is_nvidia)
        constraints->valid_sw_formats[count++] = AV_PIX_FMT_CUDA;
#endif
    constraints->valid_sw_formats[count++] = AV_PIX_FMT_NONE;

    constraints->min_width  = 0;
    constraints->min_height = 0;
    constraints->max_width  = p->props.properties.limits.maxImageDimension2D;
    constraints->max_height = p->props.properties.limits.maxImageDimension2D;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(enum AVPixelFormat));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VULKAN;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}
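
/*
 * Memory-type selection below relies on the spec's ordering guarantee:
 * req->memoryTypeBits is a bitmask over the device's memory types, and
 * types are listed roughly best-first, so the first type that is set in
 * the mask, carries req_flags and fits in its heap wins. For example,
 * with memoryTypeBits = 0b0110 and req_flags = DEVICE_LOCAL, type 1 is
 * chosen if it is device-local, otherwise type 2 is tried.
 */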

static int alloc_mem(AVHWDeviceContext *ctx, VkMemoryRequirements *req,
                     VkMemoryPropertyFlagBits req_flags, const void *alloc_extension,
                     VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
{
    VkResult ret;
    int index = -1;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *dev_hwctx = ctx->hwctx;
    VkMemoryAllocateInfo alloc_info = {
        .sType          = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext          = alloc_extension,
        .allocationSize = req->size,
    };

    /* The Vulkan spec requires memory types to be sorted in the "optimal"
     * order, so the first matching type we find will be the best/fastest one */
    for (int i = 0; i < p->mprops.memoryTypeCount; i++) {
        const VkMemoryType *type = &p->mprops.memoryTypes[i];

        /* The memory type must be supported by the requirements (bitfield) */
        if (!(req->memoryTypeBits & (1 << i)))
            continue;

        /* The memory type flags must include our properties */
        if ((type->propertyFlags & req_flags) != req_flags)
            continue;

        /* The memory type must be large enough */
        if (req->size > p->mprops.memoryHeaps[type->heapIndex].size)
            continue;

        /* Found a suitable memory type */
        index = i;
        break;
    }

    if (index < 0) {
        av_log(ctx, AV_LOG_ERROR, "No memory type found for flags 0x%x\n",
               req_flags);
        return AVERROR(EINVAL);
    }

    alloc_info.memoryTypeIndex = index;

    ret = vk->AllocateMemory(dev_hwctx->act_dev, &alloc_info,
                             dev_hwctx->alloc, mem);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to allocate memory: %s\n",
               vk_ret2str(ret));
        return AVERROR(ENOMEM);
    }

    *mem_flags |= p->mprops.memoryTypes[index].propertyFlags;

    return 0;
}

static void vulkan_free_internal(AVVkFrame *f)
{
    AVVkFrameInternal *internal = f->internal;

    if (!internal)
        return;

#if CONFIG_CUDA
    if (internal->cuda_fc_ref) {
        AVHWFramesContext *cuda_fc = (AVHWFramesContext *)internal->cuda_fc_ref->data;
        int planes = av_pix_fmt_count_planes(cuda_fc->sw_format);
        AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
        AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
        AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
        CudaFunctions *cu = cu_internal->cuda_dl;

        for (int i = 0; i < planes; i++) {
            if (internal->cu_sem[i])
                CHECK_CU(cu->cuDestroyExternalSemaphore(internal->cu_sem[i]));
            if (internal->cu_mma[i])
                CHECK_CU(cu->cuMipmappedArrayDestroy(internal->cu_mma[i]));
            if (internal->ext_mem[i])
                CHECK_CU(cu->cuDestroyExternalMemory(internal->ext_mem[i]));
#ifdef _WIN32
            if (internal->ext_sem_handle[i])
                CloseHandle(internal->ext_sem_handle[i]);
            if (internal->ext_mem_handle[i])
                CloseHandle(internal->ext_mem_handle[i]);
#endif
        }

        av_buffer_unref(&internal->cuda_fc_ref);
    }
#endif

    av_freep(&f->internal);
}

static void vulkan_frame_free(void *opaque, uint8_t *data)
{
    AVVkFrame *f = (AVVkFrame *)data;
    AVHWFramesContext *hwfc = opaque;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    int planes = av_pix_fmt_count_planes(hwfc->sw_format);

    /* We could use vkWaitSemaphores, but the validation layer seems to have
     * issues tracking command buffer execution state on uninit. */
    vk->DeviceWaitIdle(hwctx->act_dev);

    vulkan_free_internal(f);

    for (int i = 0; i < planes; i++) {
        vk->DestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
        vk->FreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
        vk->DestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
    }

    av_free(f);
}

static int alloc_bind_mem(AVHWFramesContext *hwfc, AVVkFrame *f,
                          void *alloc_pnext, size_t alloc_pnext_stride)
{
    int err;
    VkResult ret;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanFramesContext *hwfctx = hwfc->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VkBindImageMemoryInfo bind_info[AV_NUM_DATA_POINTERS] = { { 0 } };

    VkMemoryRequirements cont_memory_requirements = { 0 };
    int cont_mem_size_list[AV_NUM_DATA_POINTERS] = { 0 };
    int cont_mem_size = 0;

    AVVulkanDeviceContext *hwctx = ctx->hwctx;

    for (int i = 0; i < planes; i++) {
        int use_ded_mem;
        VkImageMemoryRequirementsInfo2 req_desc = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = f->img[i],
        };
        VkMemoryDedicatedAllocateInfo ded_alloc = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
            .pNext = (void *)(((uint8_t *)alloc_pnext) + i*alloc_pnext_stride),
        };
        VkMemoryDedicatedRequirements ded_req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
        };
        VkMemoryRequirements2 req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
            .pNext = &ded_req,
        };

        vk->GetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req);

        if (f->tiling == VK_IMAGE_TILING_LINEAR)
            req.memoryRequirements.size = FFALIGN(req.memoryRequirements.size,
                                                  p->props.properties.limits.minMemoryMapAlignment);

        if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY) {
            if (ded_req.requiresDedicatedAllocation) {
                av_log(hwfc, AV_LOG_ERROR, "Cannot allocate all planes in a single allocation, "
                                           "device requires dedicated image allocation!\n");
                return AVERROR(EINVAL);
            } else if (!i) {
                cont_memory_requirements = req.memoryRequirements;
            } else if (cont_memory_requirements.memoryTypeBits !=
                       req.memoryRequirements.memoryTypeBits) {
                av_log(hwfc, AV_LOG_ERROR, "The memory requirements differ between plane 0 "
                                           "and %i, cannot allocate in a single region!\n",
                       i);
                return AVERROR(EINVAL);
            }

            cont_mem_size_list[i] = FFALIGN(req.memoryRequirements.size,
                                            req.memoryRequirements.alignment);
            cont_mem_size += cont_mem_size_list[i];
            continue;
        }

        /* In case the implementation prefers/requires dedicated allocation */
        use_ded_mem = ded_req.prefersDedicatedAllocation |
                      ded_req.requiresDedicatedAllocation;
        if (use_ded_mem)
            ded_alloc.image = f->img[i];

        /* Allocate memory */
        if ((err = alloc_mem(ctx, &req.memoryRequirements,
                             f->tiling == VK_IMAGE_TILING_LINEAR ?
                             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
                             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
                             use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
                             &f->flags, &f->mem[i])))
            return err;

        f->size[i] = req.memoryRequirements.size;
        bind_info[i].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
        bind_info[i].image  = f->img[i];
        bind_info[i].memory = f->mem[i];
    }

    if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY) {
        cont_memory_requirements.size = cont_mem_size;

        /* Allocate memory */
        if ((err = alloc_mem(ctx, &cont_memory_requirements,
                             f->tiling == VK_IMAGE_TILING_LINEAR ?
                             VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
                             VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
                             (void *)(((uint8_t *)alloc_pnext)),
                             &f->flags, &f->mem[0])))
            return err;

        f->size[0] = cont_memory_requirements.size;

        for (int i = 0, offset = 0; i < planes; i++) {
            bind_info[i].sType        = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
            bind_info[i].image        = f->img[i];
            bind_info[i].memory       = f->mem[0];
            bind_info[i].memoryOffset = offset;

            f->offset[i] = bind_info[i].memoryOffset;
            offset += cont_mem_size_list[i];
        }
    }

    /* Bind the allocated memory to the images */
    ret = vk->BindImageMemory2(hwctx->act_dev, planes, bind_info);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
               vk_ret2str(ret));
        return AVERROR_EXTERNAL;
    }

    return 0;
}
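
/*
 * With AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY, alloc_bind_mem() above packs
 * every plane into f->mem[0], recording each plane's byte offset in
 * f->offset[]; successive offsets are the previous planes' sizes rounded
 * up to their required alignment. Without the flag, each plane owns its
 * own VkDeviceMemory allocation.
 */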

enum PrepMode {
    PREP_MODE_WRITE,
    PREP_MODE_EXTERNAL_EXPORT,
    PREP_MODE_EXTERNAL_IMPORT
};

static int prepare_frame(AVHWFramesContext *hwfc, VulkanExecCtx *ectx,
                         AVVkFrame *frame, enum PrepMode pmode)
{
    int err;
    uint32_t src_qf, dst_qf;
    VkImageLayout new_layout;
    VkAccessFlags new_access;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    uint64_t sem_sig_val[AV_NUM_DATA_POINTERS];

    VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };

    VkTimelineSemaphoreSubmitInfo s_timeline_sem_info = {
        .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
        .pSignalSemaphoreValues = sem_sig_val,
        .signalSemaphoreValueCount = planes,
    };

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext                = &s_timeline_sem_info,
        .pSignalSemaphores    = frame->sem,
        .signalSemaphoreCount = planes,
    };

    VkPipelineStageFlagBits wait_st[AV_NUM_DATA_POINTERS];
    for (int i = 0; i < planes; i++) {
        wait_st[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
        sem_sig_val[i] = frame->sem_value[i] + 1;
    }

    switch (pmode) {
    case PREP_MODE_WRITE:
        new_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
        src_qf     = VK_QUEUE_FAMILY_IGNORED;
        dst_qf     = VK_QUEUE_FAMILY_IGNORED;
        break;
    case PREP_MODE_EXTERNAL_IMPORT:
        new_layout = VK_IMAGE_LAYOUT_GENERAL;
        new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
        src_qf     = VK_QUEUE_FAMILY_EXTERNAL_KHR;
        dst_qf     = VK_QUEUE_FAMILY_IGNORED;
        s_timeline_sem_info.pWaitSemaphoreValues = frame->sem_value;
        s_timeline_sem_info.waitSemaphoreValueCount = planes;
        s_info.pWaitSemaphores = frame->sem;
        s_info.pWaitDstStageMask = wait_st;
        s_info.waitSemaphoreCount = planes;
        break;
    case PREP_MODE_EXTERNAL_EXPORT:
        new_layout = VK_IMAGE_LAYOUT_GENERAL;
        new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
        src_qf     = VK_QUEUE_FAMILY_IGNORED;
        dst_qf     = VK_QUEUE_FAMILY_EXTERNAL_KHR;
        s_timeline_sem_info.pWaitSemaphoreValues = frame->sem_value;
        s_timeline_sem_info.waitSemaphoreValueCount = planes;
        s_info.pWaitSemaphores = frame->sem;
        s_info.pWaitDstStageMask = wait_st;
        s_info.waitSemaphoreCount = planes;
        break;
    }

    if ((err = wait_start_exec_ctx(hwfc, ectx)))
        return err;

    /* Change the image layout to something more optimal for writes.
     * This also signals the newly created semaphore, making it usable
     * for synchronization */
    for (int i = 0; i < planes; i++) {
        img_bar[i].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        img_bar[i].srcAccessMask = 0x0;
        img_bar[i].dstAccessMask = new_access;
        img_bar[i].oldLayout = frame->layout[i];
        img_bar[i].newLayout = new_layout;
        img_bar[i].srcQueueFamilyIndex = src_qf;
        img_bar[i].dstQueueFamilyIndex = dst_qf;
        img_bar[i].image = frame->img[i];
        img_bar[i].subresourceRange.levelCount = 1;
        img_bar[i].subresourceRange.layerCount = 1;
        img_bar[i].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

        frame->layout[i] = img_bar[i].newLayout;
        frame->access[i] = img_bar[i].dstAccessMask;
    }

    vk->CmdPipelineBarrier(get_buf_exec_ctx(hwfc, ectx),
                           VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                           VK_PIPELINE_STAGE_TRANSFER_BIT,
                           0, 0, NULL, 0, NULL, planes, img_bar);

    return submit_exec_ctx(hwfc, ectx, &s_info, frame, 0);
}
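
/*
 * Plane dimensions: full-resolution planes pass through unchanged, chroma
 * planes are rounded up after the format's log2 chroma shift. For NV12 at
 * 1920x1080, plane 0 is 1920x1080 and plane 1 is
 * AV_CEIL_RSHIFT(1920, 1) x AV_CEIL_RSHIFT(1080, 1) = 960x540.
 */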

static inline void get_plane_wh(int *w, int *h, enum AVPixelFormat format,
                                int frame_w, int frame_h, int plane)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format);

    /* Currently always true unless gray + alpha support is added */
    if (!plane || (plane == 3) || desc->flags & AV_PIX_FMT_FLAG_RGB ||
        !(desc->flags & AV_PIX_FMT_FLAG_PLANAR)) {
        *w = frame_w;
        *h = frame_h;
        return;
    }

    *w = AV_CEIL_RSHIFT(frame_w, desc->log2_chroma_w);
    *h = AV_CEIL_RSHIFT(frame_h, desc->log2_chroma_h);
}

static int create_frame(AVHWFramesContext *hwfc, AVVkFrame **frame,
                        VkImageTiling tiling, VkImageUsageFlagBits usage,
                        void *create_pnext)
{
    int err;
    VkResult ret;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    enum AVPixelFormat format = hwfc->sw_format;
    const VkFormat *img_fmts = av_vkfmt_from_pixfmt(format);
    const int planes = av_pix_fmt_count_planes(format);

    VkExportSemaphoreCreateInfo ext_sem_info = {
        .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
#ifdef _WIN32
        .handleTypes = IsWindows8OrGreater()
            ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
            : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
#else
        .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
#endif
    };

    VkSemaphoreTypeCreateInfo sem_type_info = {
        .sType         = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
#ifdef _WIN32
        .pNext         = p->extensions & FF_VK_EXT_EXTERNAL_WIN32_SEM ? &ext_sem_info : NULL,
#else
        .pNext         = p->extensions & FF_VK_EXT_EXTERNAL_FD_SEM ? &ext_sem_info : NULL,
#endif
        .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
        .initialValue  = 0,
    };

    VkSemaphoreCreateInfo sem_spawn = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = &sem_type_info,
    };

    AVVkFrame *f = av_vk_frame_alloc();
    if (!f) {
        av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
        return AVERROR(ENOMEM);
    }

    /* Create the images */
    for (int i = 0; i < planes; i++) {
        VkImageCreateInfo create_info = {
            .sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .pNext                 = create_pnext,
            .imageType             = VK_IMAGE_TYPE_2D,
            .format                = img_fmts[i],
            .extent.depth          = 1,
            .mipLevels             = 1,
            .arrayLayers           = 1,
            .flags                 = VK_IMAGE_CREATE_ALIAS_BIT,
            .tiling                = tiling,
            .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED,
            .usage                 = usage,
            .samples               = VK_SAMPLE_COUNT_1_BIT,
            .pQueueFamilyIndices   = p->qfs,
            .queueFamilyIndexCount = p->num_qfs,
            .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
                                                      VK_SHARING_MODE_EXCLUSIVE,
        };

        get_plane_wh(&create_info.extent.width, &create_info.extent.height,
                     format, hwfc->width, hwfc->height, i);

        ret = vk->CreateImage(hwctx->act_dev, &create_info,
                              hwctx->alloc, &f->img[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
                   vk_ret2str(ret));
            err = AVERROR(EINVAL);
            goto fail;
        }

        /* Create semaphore */
        ret = vk->CreateSemaphore(hwctx->act_dev, &sem_spawn,
                                  hwctx->alloc, &f->sem[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            goto fail;
        }

        f->layout[i] = create_info.initialLayout;
        f->access[i] = 0x0;
        f->sem_value[i] = 0;
    }

    f->flags  = 0x0;
    f->tiling = tiling;

    *frame = f;
    return 0;

fail:
    vulkan_frame_free(hwfc, (uint8_t *)f);
    return err;
}
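
/*
 * try_export_flags() below probes whether images with the frame pool's
 * creation parameters can be exported through a given external-memory
 * handle type; with DRM-modifier tiling each candidate modifier is probed
 * individually, since support may differ per modifier.
 */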

/* Checks if an export flag is enabled, and if it is ORs it with *iexp */
static void try_export_flags(AVHWFramesContext *hwfc,
                             VkExternalMemoryHandleTypeFlags *comp_handle_types,
                             VkExternalMemoryHandleTypeFlagBits *iexp,
                             VkExternalMemoryHandleTypeFlagBits exp)
{
    VkResult ret;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    const VkImageDrmFormatModifierListCreateInfoEXT *drm_mod_info =
        vk_find_struct(hwctx->create_pnext,
                       VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT);
    int has_mods = hwctx->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT && drm_mod_info;
    int nb_mods;

    VkExternalImageFormatProperties eprops = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
    };
    VkImageFormatProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
        .pNext = &eprops,
    };
    VkPhysicalDeviceImageDrmFormatModifierInfoEXT phy_dev_mod_info = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
        .pNext = NULL,
        .pQueueFamilyIndices   = p->qfs,
        .queueFamilyIndexCount = p->num_qfs,
        .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
                                                  VK_SHARING_MODE_EXCLUSIVE,
    };
    VkPhysicalDeviceExternalImageFormatInfo enext = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
        .handleType = exp,
        .pNext = has_mods ? &phy_dev_mod_info : NULL,
    };
    VkPhysicalDeviceImageFormatInfo2 pinfo = {
        .sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
        .pNext  = !exp ? NULL : &enext,
        .format = av_vkfmt_from_pixfmt(hwfc->sw_format)[0],
        .type   = VK_IMAGE_TYPE_2D,
        .tiling = hwctx->tiling,
        .usage  = hwctx->usage,
        .flags  = VK_IMAGE_CREATE_ALIAS_BIT,
    };

    nb_mods = has_mods ? drm_mod_info->drmFormatModifierCount : 1;
    for (int i = 0; i < nb_mods; i++) {
        if (has_mods)
            phy_dev_mod_info.drmFormatModifier = drm_mod_info->pDrmFormatModifiers[i];

        ret = vk->GetPhysicalDeviceImageFormatProperties2(dev_hwctx->phys_dev,
                                                          &pinfo, &props);

        if (ret == VK_SUCCESS) {
            *iexp |= exp;
            *comp_handle_types |= eprops.externalMemoryProperties.compatibleHandleTypes;
        }
    }
}

static AVBufferRef *vulkan_pool_alloc(void *opaque, size_t size)
{
    int err;
    AVVkFrame *f;
    AVBufferRef *avbuf = NULL;
    AVHWFramesContext *hwfc = opaque;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    VkExportMemoryAllocateInfo eminfo[AV_NUM_DATA_POINTERS];
    VkExternalMemoryHandleTypeFlags e = 0x0;

    VkExternalMemoryImageCreateInfo eiinfo = {
        .sType       = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
        .pNext       = hwctx->create_pnext,
    };

#ifdef _WIN32
    if (p->extensions & FF_VK_EXT_EXTERNAL_WIN32_MEMORY)
        try_export_flags(hwfc, &eiinfo.handleTypes, &e, IsWindows8OrGreater()
                             ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
                             : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT);
#else
    if (p->extensions & FF_VK_EXT_EXTERNAL_FD_MEMORY)
        try_export_flags(hwfc, &eiinfo.handleTypes, &e,
                         VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);

    if (p->extensions & (FF_VK_EXT_EXTERNAL_DMABUF_MEMORY | FF_VK_EXT_DRM_MODIFIER_FLAGS))
        try_export_flags(hwfc, &eiinfo.handleTypes, &e,
                         VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
#endif

    for (int i = 0; i < av_pix_fmt_count_planes(hwfc->sw_format); i++) {
        eminfo[i].sType       = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
        eminfo[i].pNext       = hwctx->alloc_pnext[i];
        eminfo[i].handleTypes = e;
    }

    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                       eiinfo.handleTypes ? &eiinfo : NULL);
    if (err)
        return NULL;

    err = alloc_bind_mem(hwfc, f, eminfo, sizeof(*eminfo));
    if (err)
        goto fail;

    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_WRITE);
    if (err)
        goto fail;

    avbuf = av_buffer_create((uint8_t *)f, sizeof(AVVkFrame),
                             vulkan_frame_free, hwfc, 0);
    if (!avbuf)
        goto fail;

    return avbuf;

fail:
    vulkan_frame_free(hwfc, (uint8_t *)f);
    return NULL;
}

static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
{
    VulkanFramesPriv *fp = hwfc->internal->priv;

    if (fp->modifier_info) {
        if (fp->modifier_info->pDrmFormatModifiers)
            av_freep(&fp->modifier_info->pDrmFormatModifiers);
        av_freep(&fp->modifier_info);
    }

    free_exec_ctx(hwfc, &fp->conv_ctx);
    free_exec_ctx(hwfc, &fp->upload_ctx);
    free_exec_ctx(hwfc, &fp->download_ctx);
}
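
/*
 * Frame pool initialization: tiling defaults to DRM modifiers when the
 * extension is available, else linear if requested via "linear_images",
 * else optimal. Contiguous plane placement is forced on Intel devices
 * unless overridden through the "contiguous_planes" device option.
 */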

static int vulkan_frames_init(AVHWFramesContext *hwfc)
{
    int err;
    AVVkFrame *f;
    AVVulkanFramesContext *hwctx = hwfc->hwctx;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    AVVulkanDeviceContext *dev_hwctx = hwfc->device_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    const VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
    const int has_modifiers = !!(p->extensions & FF_VK_EXT_DRM_MODIFIER_FLAGS);

    /* Default tiling flags */
    hwctx->tiling = hwctx->tiling ? hwctx->tiling :
                    has_modifiers ? VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT :
                    p->use_linear_images ? VK_IMAGE_TILING_LINEAR :
                    VK_IMAGE_TILING_OPTIMAL;

    if (!hwctx->usage)
        hwctx->usage = FF_VK_DEFAULT_USAGE_FLAGS;

    if (!(hwctx->flags & AV_VK_FRAME_FLAG_NONE)) {
        if (p->contiguous_planes == 1 ||
            ((p->contiguous_planes == -1) && p->dev_is_intel))
            hwctx->flags |= AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY;
    }

    modifier_info = vk_find_struct(hwctx->create_pnext,
                                   VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT);

    /* Get the supported modifiers if the user has not given any. */
    if (has_modifiers && !modifier_info) {
        const VkFormat *fmt = av_vkfmt_from_pixfmt(hwfc->sw_format);
        VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
        FFVulkanFunctions *vk = &p->vkfn;
        VkDrmFormatModifierPropertiesEXT *mod_props;
        uint64_t *modifiers;
        int modifier_count = 0;

        VkDrmFormatModifierPropertiesListEXT mod_props_list = {
            .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
            .pNext = NULL,
            .drmFormatModifierCount = 0,
            .pDrmFormatModifierProperties = NULL,
        };
        VkFormatProperties2 prop = {
            .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
            .pNext = &mod_props_list,
        };

        /* Get all supported modifiers */
        vk->GetPhysicalDeviceFormatProperties2(dev_hwctx->phys_dev, fmt[0], &prop);

        if (!mod_props_list.drmFormatModifierCount) {
            av_log(hwfc, AV_LOG_ERROR, "There are no supported modifiers for the given sw_format\n");
            return AVERROR(EINVAL);
        }

        /* Create a structure to hold the modifier list info */
        modifier_info = av_mallocz(sizeof(*modifier_info));
        if (!modifier_info)
            return AVERROR(ENOMEM);

        modifier_info->pNext = NULL;
        modifier_info->sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;

        /* Add structure to the image creation pNext chain */
        if (!hwctx->create_pnext)
            hwctx->create_pnext = modifier_info;
        else
            vk_link_struct(hwctx->create_pnext, (void *)modifier_info);

        /* Backup the allocated struct to be freed later */
        fp->modifier_info = modifier_info;

        /* Allocate list of modifiers */
        modifiers = av_mallocz(mod_props_list.drmFormatModifierCount *
                               sizeof(*modifiers));
        if (!modifiers)
            return AVERROR(ENOMEM);

        modifier_info->pDrmFormatModifiers = modifiers;

        /* Allocate a temporary list to hold all modifiers supported */
        mod_props = av_mallocz(mod_props_list.drmFormatModifierCount *
                               sizeof(*mod_props));
        if (!mod_props)
            return AVERROR(ENOMEM);

        mod_props_list.pDrmFormatModifierProperties = mod_props;

        /* Finally get all modifiers from the device */
        vk->GetPhysicalDeviceFormatProperties2(dev_hwctx->phys_dev, fmt[0], &prop);

        /* Reject any modifiers that don't match our requirements */
        for (int i = 0; i < mod_props_list.drmFormatModifierCount; i++) {
            if (!(mod_props[i].drmFormatModifierTilingFeatures & hwctx->usage))
                continue;

            modifiers[modifier_count++] = mod_props[i].drmFormatModifier;
        }

        if (!modifier_count) {
            av_log(hwfc, AV_LOG_ERROR, "None of the given modifiers supports"
                                       " the usage flags!\n");
            av_freep(&mod_props);
            return AVERROR(EINVAL);
        }

        modifier_info->drmFormatModifierCount = modifier_count;
        av_freep(&mod_props);
    }

    err = create_exec_ctx(hwfc, &fp->conv_ctx,
                          dev_hwctx->queue_family_comp_index,
                          dev_hwctx->nb_comp_queues);
    if (err)
        return err;

    err = create_exec_ctx(hwfc, &fp->upload_ctx,
                          dev_hwctx->queue_family_tx_index,
                          dev_hwctx->nb_tx_queues);
    if (err)
        return err;

    err = create_exec_ctx(hwfc, &fp->download_ctx,
                          dev_hwctx->queue_family_tx_index, 1);
    if (err)
        return err;

    /* Test to see if allocation will fail */
    err = create_frame(hwfc, &f, hwctx->tiling, hwctx->usage,
                       hwctx->create_pnext);
    if (err)
        return err;

    vulkan_frame_free(hwfc, (uint8_t *)f);

    /* If user did not specify a pool, hwfc->pool will be set to the internal one
     * in hwcontext.c just after this gets called */
    if (!hwfc->pool) {
        hwfc->internal->pool_internal = av_buffer_pool_init2(sizeof(AVVkFrame),
                                                             hwfc, vulkan_pool_alloc,
                                                             NULL);
        if (!hwfc->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    return 0;
}
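
/*
 * Entry point behind av_hwframe_get_buffer(). An illustrative caller,
 * using the public API (error handling omitted):
 *
 *     AVFrame *frame = av_frame_alloc();
 *     av_hwframe_get_buffer(hw_frames_ref, frame, 0);
 *     // frame->data[0] now points at an AVVkFrame
 */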

static int vulkan_get_buffer(AVHWFramesContext *hwfc, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(hwfc->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[0] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VULKAN;
    frame->width   = hwfc->width;
    frame->height  = hwfc->height;

    return 0;
}

static int vulkan_transfer_get_formats(AVHWFramesContext *hwfc,
                                       enum AVHWFrameTransferDirection dir,
                                       enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = hwfc->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;
    return 0;
}

typedef struct VulkanMapping {
    AVVkFrame *frame;
    int flags;
} VulkanMapping;

static void vulkan_unmap_frame(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    VulkanMapping *map = hwmap->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    /* Check if buffer needs flushing */
    if ((map->flags & AV_HWFRAME_MAP_WRITE) &&
        !(map->frame->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
        VkResult ret;
        VkMappedMemoryRange flush_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };

        for (int i = 0; i < planes; i++) {
            flush_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            flush_ranges[i].memory = map->frame->mem[i];
            flush_ranges[i].size   = VK_WHOLE_SIZE;
        }

        ret = vk->FlushMappedMemoryRanges(hwctx->act_dev, planes,
                                          flush_ranges);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   vk_ret2str(ret));
        }
    }

    for (int i = 0; i < planes; i++)
        vk->UnmapMemory(hwctx->act_dev, map->frame->mem[i]);

    av_free(map);
}
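
/*
 * CPU mapping requires host-visible memory and linear tiling. A sketch of
 * the public entry point that ends up here (assumes the pool was created
 * with linear, host-visible images):
 *
 *     AVFrame *sw = av_frame_alloc();
 *     av_hwframe_map(sw, vk_frame, AV_HWFRAME_MAP_READ);
 */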

static int vulkan_map_frame_to_mem(AVHWFramesContext *hwfc, AVFrame *dst,
                                   const AVFrame *src, int flags)
{
    VkResult ret;
    int err, mapped_mem_count = 0, mem_planes = 0;
    AVVkFrame *f = (AVVkFrame *)src->data[0];
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    AVVulkanFramesContext *hwfctx = hwfc->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    VulkanMapping *map = av_mallocz(sizeof(VulkanMapping));
    if (!map)
        return AVERROR(ENOMEM);

    if (src->format != AV_PIX_FMT_VULKAN) {
        av_log(hwfc, AV_LOG_ERROR, "Cannot map from pixel format %s!\n",
               av_get_pix_fmt_name(src->format));
        err = AVERROR(EINVAL);
        goto fail;
    }

    if (!(f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ||
        !(f->tiling == VK_IMAGE_TILING_LINEAR)) {
        av_log(hwfc, AV_LOG_ERROR, "Unable to map frame, not host visible "
               "and linear!\n");
        err = AVERROR(EINVAL);
        goto fail;
    }

    dst->width  = src->width;
    dst->height = src->height;

    mem_planes = hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY ? 1 : planes;
    for (int i = 0; i < mem_planes; i++) {
        ret = vk->MapMemory(hwctx->act_dev, f->mem[i], 0,
                            VK_WHOLE_SIZE, 0, (void **)&dst->data[i]);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to map image memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            goto fail;
        }
        mapped_mem_count++;
    }

    if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY) {
        for (int i = 0; i < planes; i++)
            dst->data[i] = dst->data[0] + f->offset[i];
    }

    /* Check if the memory contents matter */
    if (((flags & AV_HWFRAME_MAP_READ) || !(flags & AV_HWFRAME_MAP_OVERWRITE)) &&
        !(f->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) {
        VkMappedMemoryRange map_mem_ranges[AV_NUM_DATA_POINTERS] = { { 0 } };
        for (int i = 0; i < planes; i++) {
            map_mem_ranges[i].sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
            map_mem_ranges[i].size   = VK_WHOLE_SIZE;
            map_mem_ranges[i].memory = f->mem[i];
        }

        ret = vk->InvalidateMappedMemoryRanges(hwctx->act_dev, planes,
                                               map_mem_ranges);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            goto fail;
        }
    }

    for (int i = 0; i < planes; i++) {
        VkImageSubresource sub = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
        };
        VkSubresourceLayout layout;
        vk->GetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
        dst->linesize[i] = layout.rowPitch;
    }

    map->frame = f;
    map->flags = flags;

    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src,
                                &vulkan_unmap_frame, map);
    if (err < 0)
        goto fail;

    return 0;

fail:
    for (int i = 0; i < mapped_mem_count; i++)
        vk->UnmapMemory(hwctx->act_dev, f->mem[i]);

    av_free(map);
    return err;
}
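
/* API users reach the mapping above through av_hwframe_map(); a minimal
 * sketch of hypothetical caller code (not part of this file):
 *
 *     AVFrame *cpu = av_frame_alloc();
 *     if (cpu && av_hwframe_map(cpu, vk_frame, AV_HWFRAME_MAP_READ) == 0) {
 *         // cpu->data[]/cpu->linesize[] now alias the Vulkan image memory
 *         av_frame_free(&cpu); // unreferencing runs vulkan_unmap_frame()
 *     }
 */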

#if CONFIG_LIBDRM
static void vulkan_unmap_from_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    AVVkFrame *f = hwmap->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    VkSemaphoreWaitInfo wait_info = {
        .sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
        .flags          = 0x0,
        .pSemaphores    = f->sem,
        .pValues        = f->sem_value,
        .semaphoreCount = planes,
    };

    vk->WaitSemaphores(hwctx->act_dev, &wait_info, UINT64_MAX);

    vulkan_free_internal(f);

    for (int i = 0; i < planes; i++) {
        vk->DestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
        vk->FreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);
        vk->DestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
    }

    av_free(f);
}
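
/* Waiting on the frame's timeline semaphores before destruction guarantees
 * that all GPU work still referencing the imported images has completed;
 * destroying a VkImage or freeing its memory while it is in use is
 * undefined behaviour per the Vulkan specification. */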

static const struct {
    uint32_t drm_fourcc;
    VkFormat vk_format;
} vulkan_drm_format_map[] = {
    { DRM_FORMAT_R8,       VK_FORMAT_R8_UNORM       },
    { DRM_FORMAT_R16,      VK_FORMAT_R16_UNORM      },
    { DRM_FORMAT_GR88,     VK_FORMAT_R8G8_UNORM     },
    { DRM_FORMAT_RG88,     VK_FORMAT_R8G8_UNORM     },
    { DRM_FORMAT_GR1616,   VK_FORMAT_R16G16_UNORM   },
    { DRM_FORMAT_RG1616,   VK_FORMAT_R16G16_UNORM   },
    { DRM_FORMAT_ARGB8888, VK_FORMAT_B8G8R8A8_UNORM },
    { DRM_FORMAT_XRGB8888, VK_FORMAT_B8G8R8A8_UNORM },
    { DRM_FORMAT_ABGR8888, VK_FORMAT_R8G8B8A8_UNORM },
    { DRM_FORMAT_XBGR8888, VK_FORMAT_R8G8B8A8_UNORM },

    /* To map between VAAPI and Vulkan (for Vulkan filtering), the formats on
     * each side must match. The mappings for VUYX, P012 and XV36 are not
     * exact: just as P010 is mapped to full 16-bit formats, so is P012; VUYX
     * has to map to an alpha-equipped format, and XV36 to a fully 16-bit
     * alpha-equipped format. Vulkan does have 10X6 and 12X4 formats that
     * would fit P010/P012/XV36, but they lack the STORAGE usage flag, which
     * hwcontext_vulkan currently requires in order to create writable
     * images, so they cannot be used here yet. */

    // All these DRM_FORMATs were added in the same libdrm commit.
#ifdef DRM_FORMAT_XYUV8888
    { DRM_FORMAT_XYUV8888, VK_FORMAT_R8G8B8A8_UNORM },
    { DRM_FORMAT_XVYU12_16161616, VK_FORMAT_R16G16B16A16_UNORM },
    // As we had to map XV36 to a 16bit Vulkan format, reverse mapping will
    // end up yielding Y416 as the DRM format, so we need to recognise it.
    { DRM_FORMAT_Y416, VK_FORMAT_R16G16B16A16_UNORM },
#endif
};

static inline VkFormat drm_to_vulkan_fmt(uint32_t drm_fourcc)
{
    for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
        if (vulkan_drm_format_map[i].drm_fourcc == drm_fourcc)
            return vulkan_drm_format_map[i].vk_format;
    return VK_FORMAT_UNDEFINED;
}

static int vulkan_map_from_drm_frame_desc(AVHWFramesContext *hwfc, AVVkFrame **frame,
                                          const AVFrame *src)
{
    int err = 0;
    VkResult ret;
    AVVkFrame *f;
    int bind_counts = 0;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    const AVDRMFrameDescriptor *desc = (AVDRMFrameDescriptor *)src->data[0];
    VkBindImageMemoryInfo bind_info[AV_DRM_MAX_PLANES];
    VkBindImagePlaneMemoryInfo plane_info[AV_DRM_MAX_PLANES];

    for (int i = 0; i < desc->nb_layers; i++) {
        if (drm_to_vulkan_fmt(desc->layers[i].format) == VK_FORMAT_UNDEFINED) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported DMABUF layer format %#08x!\n",
                   desc->layers[i].format);
            return AVERROR(EINVAL);
        }
    }

    if (!(f = av_vk_frame_alloc())) {
        av_log(ctx, AV_LOG_ERROR, "Unable to allocate memory for AVVkFrame!\n");
        return AVERROR(ENOMEM);
    }

    f->tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;

    for (int i = 0; i < desc->nb_layers; i++) {
        const int planes = desc->layers[i].nb_planes;

        /* Semaphore */
        VkSemaphoreTypeCreateInfo sem_type_info = {
            .sType         = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
            .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
            .initialValue  = 0,
        };
        VkSemaphoreCreateInfo sem_spawn = {
            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
            .pNext = &sem_type_info,
        };

        /* Image creation */
        VkSubresourceLayout ext_img_layouts[AV_DRM_MAX_PLANES];
        VkImageDrmFormatModifierExplicitCreateInfoEXT ext_img_mod_spec = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
            .drmFormatModifier = desc->objects[0].format_modifier,
            .drmFormatModifierPlaneCount = planes,
            .pPlaneLayouts = (const VkSubresourceLayout *)&ext_img_layouts,
        };
        VkExternalMemoryImageCreateInfo ext_img_spec = {
            .sType       = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
            .pNext       = &ext_img_mod_spec,
            .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
        };
        VkImageCreateInfo create_info = {
            .sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            .pNext                 = &ext_img_spec,
            .imageType             = VK_IMAGE_TYPE_2D,
            .format                = drm_to_vulkan_fmt(desc->layers[i].format),
            .extent.depth          = 1,
            .mipLevels             = 1,
            .arrayLayers           = 1,
            .flags                 = 0x0, /* ALIAS flag is implicit for imported images */
            .tiling                = f->tiling,
            .initialLayout         = VK_IMAGE_LAYOUT_UNDEFINED, /* specs say so */
            .usage                 = VK_IMAGE_USAGE_SAMPLED_BIT |
                                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
            .samples               = VK_SAMPLE_COUNT_1_BIT,
            .pQueueFamilyIndices   = p->qfs,
            .queueFamilyIndexCount = p->num_qfs,
            .sharingMode           = p->num_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
                                                      VK_SHARING_MODE_EXCLUSIVE,
        };

        /* Image format verification */
        VkExternalImageFormatProperties ext_props = {
            .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
        };
        VkImageFormatProperties2 props_ret = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
            .pNext = &ext_props,
        };
        VkPhysicalDeviceImageDrmFormatModifierInfoEXT props_drm_mod = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
            .drmFormatModifier = ext_img_mod_spec.drmFormatModifier,
            .pQueueFamilyIndices = create_info.pQueueFamilyIndices,
            .queueFamilyIndexCount = create_info.queueFamilyIndexCount,
            .sharingMode = create_info.sharingMode,
        };
        VkPhysicalDeviceExternalImageFormatInfo props_ext = {
            .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
            .pNext = &props_drm_mod,
            .handleType = ext_img_spec.handleTypes,
        };
        VkPhysicalDeviceImageFormatInfo2 fmt_props = {
            .sType  = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
            .pNext  = &props_ext,
            .format = create_info.format,
            .type   = create_info.imageType,
            .tiling = create_info.tiling,
            .usage  = create_info.usage,
            .flags  = create_info.flags,
        };

        /* Check if importing is possible for this combination of parameters */
        ret = vk->GetPhysicalDeviceImageFormatProperties2(hwctx->phys_dev,
                                                          &fmt_props, &props_ret);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Cannot map DRM frame to Vulkan: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            goto fail;
        }

        /* Set the image width/height */
        get_plane_wh(&create_info.extent.width, &create_info.extent.height,
                     hwfc->sw_format, src->width, src->height, i);

        /* Set the subresource layout based on the layer properties */
        for (int j = 0; j < planes; j++) {
            ext_img_layouts[j].offset     = desc->layers[i].planes[j].offset;
            ext_img_layouts[j].rowPitch   = desc->layers[i].planes[j].pitch;
            ext_img_layouts[j].size       = 0; /* The specs say so for all 3 */
            ext_img_layouts[j].arrayPitch = 0;
            ext_img_layouts[j].depthPitch = 0;
        }

        /* Create image */
        ret = vk->CreateImage(hwctx->act_dev, &create_info,
                              hwctx->alloc, &f->img[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Image creation failure: %s\n",
                   vk_ret2str(ret));
            err = AVERROR(EINVAL);
            goto fail;
        }

        ret = vk->CreateSemaphore(hwctx->act_dev, &sem_spawn,
                                  hwctx->alloc, &f->sem[i]);
        if (ret != VK_SUCCESS) {
            av_log(hwctx, AV_LOG_ERROR, "Failed to create semaphore: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }

        /* We'd import a semaphore onto the one we created using
         * vkImportSemaphoreFdKHR but unfortunately neither DRM nor VAAPI
         * offer us anything we could import and sync with, so instead
         * just signal the semaphore we created. */

        f->layout[i] = create_info.initialLayout;
        f->access[i] = 0x0;
        f->sem_value[i] = 0;
    }

    for (int i = 0; i < desc->nb_objects; i++) {
        /* Memory requirements */
        VkImageMemoryRequirementsInfo2 req_desc = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = f->img[i],
        };
        VkMemoryDedicatedRequirements ded_req = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
        };
        VkMemoryRequirements2 req2 = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
            .pNext = &ded_req,
        };

        /* Allocation/importing */
        VkMemoryFdPropertiesKHR fdmp = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
        };
        VkImportMemoryFdInfoKHR idesc = {
            .sType      = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
            .fd         = dup(desc->objects[i].fd),
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
        };
        VkMemoryDedicatedAllocateInfo ded_alloc = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
            .pNext = &idesc,
            .image = req_desc.image,
        };

        /* Get object properties */
        ret = vk->GetMemoryFdPropertiesKHR(hwctx->act_dev,
                                           VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
                                           idesc.fd, &fdmp);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Failed to get FD properties: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL;
            close(idesc.fd);
            goto fail;
        }

        vk->GetImageMemoryRequirements2(hwctx->act_dev, &req_desc, &req2);

        /* Only a single bit must be set, not a range, and it must match */
        req2.memoryRequirements.memoryTypeBits = fdmp.memoryTypeBits;

        err = alloc_mem(ctx, &req2.memoryRequirements,
                        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
                        (ded_req.prefersDedicatedAllocation ||
                         ded_req.requiresDedicatedAllocation) ?
                            &ded_alloc : ded_alloc.pNext,
                        &f->flags, &f->mem[i]);
        if (err) {
            close(idesc.fd);
            goto fail;
        }

        f->size[i] = req2.memoryRequirements.size;
    }

    for (int i = 0; i < desc->nb_layers; i++) {
        const int planes = desc->layers[i].nb_planes;
        for (int j = 0; j < planes; j++) {
            VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
                                           j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
                                                    VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;

            plane_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
            plane_info[bind_counts].pNext = NULL;
            plane_info[bind_counts].planeAspect = aspect;

            bind_info[bind_counts].sType  = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
            bind_info[bind_counts].pNext  = planes > 1 ? &plane_info[bind_counts] : NULL;
            bind_info[bind_counts].image  = f->img[i];
            bind_info[bind_counts].memory = f->mem[desc->layers[i].planes[j].object_index];

            /* Offset is already signalled via pPlaneLayouts above */
            bind_info[bind_counts].memoryOffset = 0;

            bind_counts++;
        }
    }

    /* Bind the allocated memory to the images */
    ret = vk->BindImageMemory2(hwctx->act_dev, bind_counts, bind_info);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory: %s\n",
               vk_ret2str(ret));
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_EXTERNAL_IMPORT);
    if (err)
        goto fail;

    *frame = f;

    return 0;

fail:
    for (int i = 0; i < desc->nb_layers; i++) {
        vk->DestroyImage(hwctx->act_dev, f->img[i], hwctx->alloc);
        vk->DestroySemaphore(hwctx->act_dev, f->sem[i], hwctx->alloc);
    }
    for (int i = 0; i < desc->nb_objects; i++)
        vk->FreeMemory(hwctx->act_dev, f->mem[i], hwctx->alloc);

    av_free(f);

    return err;
}
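
/* The import above follows the standard DMA-BUF interop sequence: verify the
 * format/modifier combination with vkGetPhysicalDeviceImageFormatProperties2,
 * create one VkImage per layer with explicit plane layouts, import each
 * object's FD as VkDeviceMemory, and bind the memory planes with
 * vkBindImageMemory2. The FDs are dup()ed beforehand because a successful
 * import transfers ownership of the FD to the Vulkan implementation. */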

static int vulkan_map_from_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                               const AVFrame *src, int flags)
{
    int err = 0;
    AVVkFrame *f;

    if ((err = vulkan_map_from_drm_frame_desc(hwfc, &f, src)))
        return err;

    /* The unmapping function will free this */
    dst->data[0] = (uint8_t *)f;
    dst->width   = src->width;
    dst->height  = src->height;

    err = ff_hwframe_map_create(dst->hw_frames_ctx, dst, src,
                                &vulkan_unmap_from_drm, f);
    if (err < 0)
        goto fail;

    av_log(hwfc, AV_LOG_DEBUG, "Mapped DRM object to Vulkan!\n");

    return 0;

fail:
    vulkan_frame_free(hwfc->device_ctx->hwctx, (uint8_t *)f);
    dst->data[0] = NULL;
    return err;
}

#if CONFIG_VAAPI
static int vulkan_map_from_vaapi(AVHWFramesContext *dst_fc,
                                 AVFrame *dst, const AVFrame *src,
                                 int flags)
{
    int err;
    AVFrame *tmp = av_frame_alloc();
    AVHWFramesContext *vaapi_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
    AVVAAPIDeviceContext *vaapi_ctx = vaapi_fc->device_ctx->hwctx;
    VASurfaceID surface_id = (VASurfaceID)(uintptr_t)src->data[3];

    if (!tmp)
        return AVERROR(ENOMEM);

    /* We have to sync since, as the previous comment said, there are no semaphores */
    vaSyncSurface(vaapi_ctx->display, surface_id);

    tmp->format = AV_PIX_FMT_DRM_PRIME;

    err = av_hwframe_map(tmp, src, flags);
    if (err < 0)
        goto fail;

    err = vulkan_map_from_drm(dst_fc, dst, tmp, flags);
    if (err < 0)
        goto fail;

    err = ff_hwframe_map_replace(dst, src);

fail:
    av_frame_free(&tmp);
    return err;
}
#endif
#endif
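
/* VAAPI surfaces are mapped in two hops: VAAPI -> DRM_PRIME via
 * av_hwframe_map(), then DRM_PRIME -> Vulkan via vulkan_map_from_drm().
 * ff_hwframe_map_replace() then rewires the result so the Vulkan frame
 * references the original VAAPI frame instead of the temporary DRM one. */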

#if CONFIG_CUDA
static int vulkan_export_to_cuda(AVHWFramesContext *hwfc,
                                 AVBufferRef *cuda_hwfc,
                                 const AVFrame *frame)
{
    int err;
    VkResult ret;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    AVHWDeviceContext *ctx = hwfc->device_ctx;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)cuda_hwfc->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    CUarray_format cufmt = desc->comp[0].depth > 8 ? CU_AD_FORMAT_UNSIGNED_INT16 :
                                                     CU_AD_FORMAT_UNSIGNED_INT8;

    dst_f = (AVVkFrame *)frame->data[0];

    dst_int = dst_f->internal;
    if (!dst_int || !dst_int->cuda_fc_ref) {
        if (!dst_f->internal)
            dst_f->internal = dst_int = av_mallocz(sizeof(*dst_f->internal));

        if (!dst_int)
            return AVERROR(ENOMEM);

        dst_int->cuda_fc_ref = av_buffer_ref(cuda_hwfc);
        if (!dst_int->cuda_fc_ref) {
            av_freep(&dst_f->internal);
            return AVERROR(ENOMEM);
        }

        for (int i = 0; i < planes; i++) {
            CUDA_EXTERNAL_MEMORY_MIPMAPPED_ARRAY_DESC tex_desc = {
                .offset = 0,
                .arrayDesc = {
                    .Depth = 0,
                    .Format = cufmt,
                    .NumChannels = 1 + ((planes == 2) && i),
                    .Flags = 0,
                },
                .numLevels = 1,
            };
            int p_w, p_h;

#ifdef _WIN32
            CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
                .type = IsWindows8OrGreater()
                    ? CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32
                    : CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT,
                .size = dst_f->size[i],
            };
            VkMemoryGetWin32HandleInfoKHR export_info = {
                .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
                .memory     = dst_f->mem[i],
                .handleType = IsWindows8OrGreater()
                    ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
                    : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
            };
            VkSemaphoreGetWin32HandleInfoKHR sem_export = {
                .sType      = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
                .semaphore  = dst_f->sem[i],
                .handleType = IsWindows8OrGreater()
                    ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
                    : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
            };
            CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
                .type = 10 /* TODO: CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TIMELINE_SEMAPHORE_WIN32 */,
            };

            ret = vk->GetMemoryWin32HandleKHR(hwctx->act_dev, &export_info,
                                              &ext_desc.handle.win32.handle);
            if (ret != VK_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a Win32 Handle: %s!\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }
            dst_int->ext_mem_handle[i] = ext_desc.handle.win32.handle;
#else
            CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
                .type = CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD,
                .size = dst_f->size[i],
            };
            VkMemoryGetFdInfoKHR export_info = {
                .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
                .memory     = dst_f->mem[i],
                .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
            };
            VkSemaphoreGetFdInfoKHR sem_export = {
                .sType      = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
                .semaphore  = dst_f->sem[i],
                .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
            };
            CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
                .type = 9 /* TODO: CU_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TIMELINE_SEMAPHORE_FD */,
            };

            ret = vk->GetMemoryFdKHR(hwctx->act_dev, &export_info,
                                     &ext_desc.handle.fd);
            if (ret != VK_SUCCESS) {
                av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD: %s!\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }
#endif

            ret = CHECK_CU(cu->cuImportExternalMemory(&dst_int->ext_mem[i], &ext_desc));
            if (ret < 0) {
#ifndef _WIN32
                close(ext_desc.handle.fd);
#endif
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            get_plane_wh(&p_w, &p_h, hwfc->sw_format, hwfc->width, hwfc->height, i);
            tex_desc.arrayDesc.Width  = p_w;
            tex_desc.arrayDesc.Height = p_h;

            ret = CHECK_CU(cu->cuExternalMemoryGetMappedMipmappedArray(&dst_int->cu_mma[i],
                                                                       dst_int->ext_mem[i],
                                                                       &tex_desc));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

            ret = CHECK_CU(cu->cuMipmappedArrayGetLevel(&dst_int->cu_array[i],
                                                        dst_int->cu_mma[i], 0));
            if (ret < 0) {
                err = AVERROR_EXTERNAL;
                goto fail;
            }

#ifdef _WIN32
            ret = vk->GetSemaphoreWin32HandleKHR(hwctx->act_dev, &sem_export,
                                                 &ext_sem_desc.handle.win32.handle);
#else
            ret = vk->GetSemaphoreFdKHR(hwctx->act_dev, &sem_export,
                                        &ext_sem_desc.handle.fd);
#endif
            if (ret != VK_SUCCESS) {
                av_log(ctx, AV_LOG_ERROR, "Failed to export semaphore: %s\n",
                       vk_ret2str(ret));
                err = AVERROR_EXTERNAL;
                goto fail;
            }
#ifdef _WIN32
            dst_int->ext_sem_handle[i] = ext_sem_desc.handle.win32.handle;
#endif

            ret = CHECK_CU(cu->cuImportExternalSemaphore(&dst_int->cu_sem[i],
                                                         &ext_sem_desc));
            if (ret < 0) {
#ifndef _WIN32
                close(ext_sem_desc.handle.fd);
#endif
                err = AVERROR_EXTERNAL;
                goto fail;
            }
        }
    }

    return 0;

fail:
    vulkan_free_internal(dst_f);
    return err;
}
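
/* The exported state is cached in dst_f->internal: handles/FDs are imported
 * into CUDA once per frame and reused for every subsequent transfer, which
 * is why the entire export block above is guarded by the
 * (!dst_int || !dst_int->cuda_fc_ref) check. */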

static int vulkan_transfer_data_from_cuda(AVHWFramesContext *hwfc,
                                          AVFrame *dst, const AVFrame *src)
{
    int err;
    CUcontext dummy;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)src->hw_frames_ctx->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
    CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };

    dst_f = (AVVkFrame *)dst->data[0];

    err = prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_EXPORT);
    if (err < 0)
        return err;

    err = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
    if (err < 0)
        return err;

    err = vulkan_export_to_cuda(hwfc, src->hw_frames_ctx, dst);
    if (err < 0) {
        CHECK_CU(cu->cuCtxPopCurrent(&dummy));
        return err;
    }

    dst_int = dst_f->internal;

    for (int i = 0; i < planes; i++) {
        s_w_par[i].params.fence.value = dst_f->sem_value[i] + 0;
        s_s_par[i].params.fence.value = dst_f->sem_value[i] + 1;
    }

    err = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
                                                     planes, cuda_dev->stream));
    if (err < 0)
        goto fail;

    for (int i = 0; i < planes; i++) {
        CUDA_MEMCPY2D cpy = {
            .srcMemoryType = CU_MEMORYTYPE_DEVICE,
            .srcDevice     = (CUdeviceptr)src->data[i],
            .srcPitch      = src->linesize[i],
            .srcY          = 0,

            .dstMemoryType = CU_MEMORYTYPE_ARRAY,
            .dstArray      = dst_int->cu_array[i],
        };

        int p_w, p_h;
        get_plane_wh(&p_w, &p_h, hwfc->sw_format, hwfc->width, hwfc->height, i);

        cpy.WidthInBytes = p_w * desc->comp[i].step;
        cpy.Height = p_h;

        err = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
        if (err < 0)
            goto fail;
    }

    err = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
                                                       planes, cuda_dev->stream));
    if (err < 0)
        goto fail;

    for (int i = 0; i < planes; i++)
        dst_f->sem_value[i]++;

    CHECK_CU(cu->cuCtxPopCurrent(&dummy));

    av_log(hwfc, AV_LOG_VERBOSE, "Transferred CUDA image to Vulkan!\n");

    return err = prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_IMPORT);

fail:
    CHECK_CU(cu->cuCtxPopCurrent(&dummy));
    vulkan_free_internal(dst_f);
    dst_f->internal = NULL;
    av_buffer_unref(&dst->buf[0]);
    return err;
}
#endif
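
/* On any CUDA-side failure the exported state is torn down with
 * vulkan_free_internal() and the destination reference is dropped. On
 * success, sem_value[] is incremented so that the value signalled by CUDA
 * (old value + 1) is what future Vulkan users of the frame will wait on. */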

static int vulkan_map_to(AVHWFramesContext *hwfc, AVFrame *dst,
                         const AVFrame *src, int flags)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (src->format) {
#if CONFIG_LIBDRM
#if CONFIG_VAAPI
    case AV_PIX_FMT_VAAPI:
        if (p->extensions & (FF_VK_EXT_EXTERNAL_DMABUF_MEMORY | FF_VK_EXT_DRM_MODIFIER_FLAGS))
            return vulkan_map_from_vaapi(hwfc, dst, src, flags);
        else
            return AVERROR(ENOSYS);
#endif
    case AV_PIX_FMT_DRM_PRIME:
        if (p->extensions & (FF_VK_EXT_EXTERNAL_DMABUF_MEMORY | FF_VK_EXT_DRM_MODIFIER_FLAGS))
            return vulkan_map_from_drm(hwfc, dst, src, flags);
        else
            return AVERROR(ENOSYS);
#endif
    default:
        return AVERROR(ENOSYS);
    }
}
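
/* Dispatch for av_hwframe_map() with a Vulkan destination: mapping from
 * VAAPI or DRM_PRIME is only attempted when the device exposes the
 * DMA-BUF/DRM-modifier functionality (the FF_VK_EXT_* flags); otherwise
 * ENOSYS is returned. */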

#if CONFIG_LIBDRM
typedef struct VulkanDRMMapping {
    AVDRMFrameDescriptor drm_desc;
    AVVkFrame *source;
} VulkanDRMMapping;

static void vulkan_unmap_to_drm(AVHWFramesContext *hwfc, HWMapDescriptor *hwmap)
{
    AVDRMFrameDescriptor *drm_desc = hwmap->priv;

    for (int i = 0; i < drm_desc->nb_objects; i++)
        close(drm_desc->objects[i].fd);

    av_free(drm_desc);
}

static inline uint32_t vulkan_fmt_to_drm(VkFormat vkfmt)
{
    for (int i = 0; i < FF_ARRAY_ELEMS(vulkan_drm_format_map); i++)
        if (vulkan_drm_format_map[i].vk_format == vkfmt)
            return vulkan_drm_format_map[i].drm_fourcc;
    return DRM_FORMAT_INVALID;
}

static int vulkan_map_to_drm(AVHWFramesContext *hwfc, AVFrame *dst,
                             const AVFrame *src, int flags)
{
    int err = 0;
    VkResult ret;
    AVVkFrame *f = (AVVkFrame *)src->data[0];
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    AVVulkanDeviceContext *hwctx = hwfc->device_ctx->hwctx;
    AVVulkanFramesContext *hwfctx = hwfc->hwctx;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    VkImageDrmFormatModifierPropertiesEXT drm_mod = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
    };
    VkSemaphoreWaitInfo wait_info = {
        .sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
        .flags          = 0x0,
        .semaphoreCount = planes,
    };

    AVDRMFrameDescriptor *drm_desc = av_mallocz(sizeof(*drm_desc));
    if (!drm_desc)
        return AVERROR(ENOMEM);

    err = prepare_frame(hwfc, &fp->conv_ctx, f, PREP_MODE_EXTERNAL_EXPORT);
    if (err < 0)
        goto end;

    /* Wait for the operation to finish so we can cleanly export it. */
    wait_info.pSemaphores = f->sem;
    wait_info.pValues     = f->sem_value;

    vk->WaitSemaphores(hwctx->act_dev, &wait_info, UINT64_MAX);

    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, &vulkan_unmap_to_drm, drm_desc);
    if (err < 0)
        goto end;

    ret = vk->GetImageDrmFormatModifierPropertiesEXT(hwctx->act_dev, f->img[0],
                                                     &drm_mod);
    if (ret != VK_SUCCESS) {
        av_log(hwfc, AV_LOG_ERROR, "Failed to retrieve DRM format modifier!\n");
        err = AVERROR_EXTERNAL;
        goto end;
    }

    for (int i = 0; (i < planes) && (f->mem[i]); i++) {
        VkMemoryGetFdInfoKHR export_info = {
            .sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
            .memory     = f->mem[i],
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
        };

        ret = vk->GetMemoryFdKHR(hwctx->act_dev, &export_info,
                                 &drm_desc->objects[i].fd);
        if (ret != VK_SUCCESS) {
            av_log(hwfc, AV_LOG_ERROR, "Unable to export the image as a FD!\n");
            err = AVERROR_EXTERNAL;
            goto end;
        }

        drm_desc->nb_objects++;
        drm_desc->objects[i].size = f->size[i];
        drm_desc->objects[i].format_modifier = drm_mod.drmFormatModifier;
    }

    drm_desc->nb_layers = planes;
    for (int i = 0; i < drm_desc->nb_layers; i++) {
        VkSubresourceLayout layout;
        VkImageSubresource sub = {
            .aspectMask = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
        };
        VkFormat plane_vkfmt = av_vkfmt_from_pixfmt(hwfc->sw_format)[i];

        drm_desc->layers[i].format    = vulkan_fmt_to_drm(plane_vkfmt);
        drm_desc->layers[i].nb_planes = 1;

        if (drm_desc->layers[i].format == DRM_FORMAT_INVALID) {
            av_log(hwfc, AV_LOG_ERROR, "Cannot map to DRM layer, unsupported!\n");
            err = AVERROR_PATCHWELCOME;
            goto end;
        }

        drm_desc->layers[i].planes[0].object_index = FFMIN(i, drm_desc->nb_objects - 1);

        if (f->tiling == VK_IMAGE_TILING_OPTIMAL)
            continue;

        vk->GetImageSubresourceLayout(hwctx->act_dev, f->img[i], &sub, &layout);
        drm_desc->layers[i].planes[0].offset = layout.offset;
        drm_desc->layers[i].planes[0].pitch  = layout.rowPitch;

        if (hwfctx->flags & AV_VK_FRAME_FLAG_CONTIGUOUS_MEMORY)
            drm_desc->layers[i].planes[0].offset += f->offset[i];
    }

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[0] = (uint8_t *)drm_desc;

    av_log(hwfc, AV_LOG_VERBOSE, "Mapped AVVkFrame to a DRM object!\n");

    return 0;

end:
    av_free(drm_desc);
    return err;
}
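
/* Exporting mirrors importing: each plane's VkDeviceMemory becomes a
 * DMA-BUF FD via vkGetMemoryFdKHR(), and the image's DRM format modifier is
 * queried so consumers can interpret the layout. Explicit per-plane
 * offset/pitch values are only reported for linear images; for optimally
 * tiled images the layout is conveyed by the modifier alone. */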

#if CONFIG_VAAPI
static int vulkan_map_to_vaapi(AVHWFramesContext *hwfc, AVFrame *dst,
                               const AVFrame *src, int flags)
{
    int err;
    AVFrame *tmp = av_frame_alloc();
    if (!tmp)
        return AVERROR(ENOMEM);

    tmp->format = AV_PIX_FMT_DRM_PRIME;

    err = vulkan_map_to_drm(hwfc, tmp, src, flags);
    if (err < 0)
        goto fail;

    err = av_hwframe_map(dst, tmp, flags);
    if (err < 0)
        goto fail;

    err = ff_hwframe_map_replace(dst, src);

fail:
    av_frame_free(&tmp);
    return err;
}
#endif
#endif

static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
                           const AVFrame *src, int flags)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (dst->format) {
#if CONFIG_LIBDRM
    case AV_PIX_FMT_DRM_PRIME:
        if (p->extensions & (FF_VK_EXT_EXTERNAL_DMABUF_MEMORY | FF_VK_EXT_DRM_MODIFIER_FLAGS))
            return vulkan_map_to_drm(hwfc, dst, src, flags);
        else
            return AVERROR(ENOSYS);
#if CONFIG_VAAPI
    case AV_PIX_FMT_VAAPI:
        if (p->extensions & (FF_VK_EXT_EXTERNAL_DMABUF_MEMORY | FF_VK_EXT_DRM_MODIFIER_FLAGS))
            return vulkan_map_to_vaapi(hwfc, dst, src, flags);
        else
            return AVERROR(ENOSYS);
#endif
#endif
    default:
        return vulkan_map_frame_to_mem(hwfc, dst, src, flags);
    }
}
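
/* Destination formats without a dedicated interop path fall back to
 * vulkan_map_frame_to_mem(), i.e. a direct host mapping, which only
 * succeeds for linear, host-visible images. */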

typedef struct ImageBuffer {
    VkBuffer buf;
    VkDeviceMemory mem;
    VkMemoryPropertyFlagBits flags;
    int mapped_mem;
} ImageBuffer;

static void free_buf(void *opaque, uint8_t *data)
{
    AVHWDeviceContext *ctx = opaque;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    ImageBuffer *vkbuf = (ImageBuffer *)data;

    if (vkbuf->buf)
        vk->DestroyBuffer(hwctx->act_dev, vkbuf->buf, hwctx->alloc);
    if (vkbuf->mem)
        vk->FreeMemory(hwctx->act_dev, vkbuf->mem, hwctx->alloc);

    av_free(data);
}

static size_t get_req_buffer_size(VulkanDevicePriv *p, int *stride, int height)
{
    size_t size;
    *stride = FFALIGN(*stride, p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
    size = height*(*stride);
    size = FFALIGN(size, p->props.properties.limits.minMemoryMapAlignment);
    return size;
}
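
/* Worked example with illustrative numbers: a 1927-byte stride with
 * optimalBufferCopyRowPitchAlignment = 64 is rounded up to 1984; with a
 * height of 1080 that gives 1984 * 1080 = 2142720 bytes, which is then
 * rounded up to the device's minMemoryMapAlignment. */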

static int create_buf(AVHWDeviceContext *ctx, AVBufferRef **buf,
                      VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags,
                      size_t size, uint32_t req_memory_bits, int host_mapped,
                      void *create_pnext, void *alloc_pnext)
{
    int err;
    VkResult ret;
    int use_ded_mem;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    VkBufferCreateInfo buf_spawn = {
        .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext       = create_pnext,
        .usage       = usage,
        .size        = size,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
    };

    VkBufferMemoryRequirementsInfo2 req_desc = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
    };
    VkMemoryDedicatedAllocateInfo ded_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        .pNext = alloc_pnext,
    };
    VkMemoryDedicatedRequirements ded_req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
    };
    VkMemoryRequirements2 req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
        .pNext = &ded_req,
    };

    ImageBuffer *vkbuf = av_mallocz(sizeof(*vkbuf));
    if (!vkbuf)
        return AVERROR(ENOMEM);

    vkbuf->mapped_mem = host_mapped;

    ret = vk->CreateBuffer(hwctx->act_dev, &buf_spawn, NULL, &vkbuf->buf);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer: %s\n",
               vk_ret2str(ret));
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    req_desc.buffer = vkbuf->buf;

    vk->GetBufferMemoryRequirements2(hwctx->act_dev, &req_desc, &req);

    /* In case the implementation prefers/requires dedicated allocation */
    use_ded_mem = ded_req.prefersDedicatedAllocation |
                  ded_req.requiresDedicatedAllocation;
    if (use_ded_mem)
        ded_alloc.buffer = vkbuf->buf;

    /* Additional requirements imposed on us */
    if (req_memory_bits)
        req.memoryRequirements.memoryTypeBits &= req_memory_bits;

    err = alloc_mem(ctx, &req.memoryRequirements, flags,
                    use_ded_mem ? &ded_alloc : (void *)ded_alloc.pNext,
                    &vkbuf->flags, &vkbuf->mem);
    if (err)
        goto fail;

    ret = vk->BindBufferMemory(hwctx->act_dev, vkbuf->buf, vkbuf->mem, 0);
    if (ret != VK_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to bind memory to buffer: %s\n",
               vk_ret2str(ret));
        err = AVERROR_EXTERNAL;
        goto fail;
    }

    *buf = av_buffer_create((uint8_t *)vkbuf, sizeof(*vkbuf), free_buf, ctx, 0);
    if (!(*buf)) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    return 0;

fail:
    free_buf(ctx, (uint8_t *)vkbuf);
    return err;
}
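
/* A minimal sketch of a hypothetical call for a host-visible staging buffer
 * (argument names assumed, not taken from this file):
 *
 *     AVBufferRef *buf;
 *     err = create_buf(dev_ctx, &buf,
 *                      VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
 *                      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
 *                      size, 0, 0, NULL, NULL);
 *
 * The returned AVBufferRef owns both the VkBuffer and its memory;
 * unreferencing it runs free_buf() above. */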

/* Skips mapping of host mapped buffers but still invalidates them */
static int map_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs, uint8_t *mem[],
                       int nb_buffers, int invalidate)
{
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    VkMappedMemoryRange invalidate_ctx[AV_NUM_DATA_POINTERS];
    int invalidate_count = 0;

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        if (vkbuf->mapped_mem)
            continue;

        ret = vk->MapMemory(hwctx->act_dev, vkbuf->mem, 0,
                            VK_WHOLE_SIZE, 0, (void **)&mem[i]);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to map buffer memory: %s\n",
                   vk_ret2str(ret));
            return AVERROR_EXTERNAL;
        }
    }

    if (!invalidate)
        return 0;

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        const VkMappedMemoryRange ival_buf = {
            .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = vkbuf->mem,
            .size   = VK_WHOLE_SIZE,
        };

        /* For host imported memory Vulkan says to use platform-defined
         * sync methods, but doesn't really say not to call flush or invalidate
         * on original host pointers. It does explicitly allow doing that on
         * host-mapped pointers which are then mapped again using vkMapMemory,
         * but known implementations return the original pointers when mapped
         * again. */
        if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;

        invalidate_ctx[invalidate_count++] = ival_buf;
    }

    if (invalidate_count) {
        ret = vk->InvalidateMappedMemoryRanges(hwctx->act_dev, invalidate_count,
                                               invalidate_ctx);
        if (ret != VK_SUCCESS)
            av_log(ctx, AV_LOG_WARNING, "Failed to invalidate memory: %s\n",
                   vk_ret2str(ret));
    }

    return 0;
}

static int unmap_buffers(AVHWDeviceContext *ctx, AVBufferRef **bufs,
                         int nb_buffers, int flush)
{
    int err = 0;
    VkResult ret;
    AVVulkanDeviceContext *hwctx = ctx->hwctx;
    VulkanDevicePriv *p = ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;
    VkMappedMemoryRange flush_ctx[AV_NUM_DATA_POINTERS];
    int flush_count = 0;

    if (flush) {
        for (int i = 0; i < nb_buffers; i++) {
            ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
            const VkMappedMemoryRange flush_buf = {
                .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                .memory = vkbuf->mem,
                .size   = VK_WHOLE_SIZE,
            };

            if (vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
                continue;

            flush_ctx[flush_count++] = flush_buf;
        }
    }

    if (flush_count) {
        ret = vk->FlushMappedMemoryRanges(hwctx->act_dev, flush_count, flush_ctx);
        if (ret != VK_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to flush memory: %s\n",
                   vk_ret2str(ret));
            err = AVERROR_EXTERNAL; /* We still want to try to unmap them */
        }
    }

    for (int i = 0; i < nb_buffers; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        if (vkbuf->mapped_mem)
            continue;

        vk->UnmapMemory(hwctx->act_dev, vkbuf->mem);
    }

    return err;
}

static int transfer_image_buf(AVHWFramesContext *hwfc, const AVFrame *f,
                              AVBufferRef **bufs, size_t *buf_offsets,
                              const int *buf_stride, int w,
                              int h, enum AVPixelFormat pix_fmt, int to_buf)
{
    int err;
    AVVkFrame *frame = (AVVkFrame *)f->data[0];
    VulkanFramesPriv *fp = hwfc->internal->priv;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    int bar_num = 0;
    VkPipelineStageFlagBits sem_wait_dst[AV_NUM_DATA_POINTERS];

    const int planes = av_pix_fmt_count_planes(pix_fmt);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);

    VkImageMemoryBarrier img_bar[AV_NUM_DATA_POINTERS] = { 0 };
    VulkanExecCtx *ectx = to_buf ? &fp->download_ctx : &fp->upload_ctx;
    VkCommandBuffer cmd_buf = get_buf_exec_ctx(hwfc, ectx);

    uint64_t sem_signal_values[AV_NUM_DATA_POINTERS];

    VkTimelineSemaphoreSubmitInfo s_timeline_sem_info = {
        .sType = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
        .pWaitSemaphoreValues = frame->sem_value,
        .pSignalSemaphoreValues = sem_signal_values,
        .waitSemaphoreValueCount = planes,
        .signalSemaphoreValueCount = planes,
    };

    VkSubmitInfo s_info = {
        .sType                = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext                = &s_timeline_sem_info,
        .pSignalSemaphores    = frame->sem,
        .pWaitSemaphores      = frame->sem,
        .pWaitDstStageMask    = sem_wait_dst,
        .signalSemaphoreCount = planes,
        .waitSemaphoreCount   = planes,
    };

    for (int i = 0; i < planes; i++)
        sem_signal_values[i] = frame->sem_value[i] + 1;

    if ((err = wait_start_exec_ctx(hwfc, ectx)))
        return err;

    /* Change the image layout to something more optimal for transfers */
    for (int i = 0; i < planes; i++) {
        VkImageLayout new_layout = to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
                                            VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        VkAccessFlags new_access = to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
                                            VK_ACCESS_TRANSFER_WRITE_BIT;

        sem_wait_dst[i] = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

        /* If the layout matches and we have read access skip the barrier */
        if ((frame->layout[i] == new_layout) && (frame->access[i] & new_access))
            continue;

        img_bar[bar_num].sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
        img_bar[bar_num].srcAccessMask = 0x0;
        img_bar[bar_num].dstAccessMask = new_access;
        img_bar[bar_num].oldLayout = frame->layout[i];
        img_bar[bar_num].newLayout = new_layout;
        img_bar[bar_num].srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[bar_num].dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
        img_bar[bar_num].image = frame->img[i];
        img_bar[bar_num].subresourceRange.levelCount = 1;
        img_bar[bar_num].subresourceRange.layerCount = 1;
        img_bar[bar_num].subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

        frame->layout[i] = img_bar[bar_num].newLayout;
        frame->access[i] = img_bar[bar_num].dstAccessMask;

        bar_num++;
    }

    if (bar_num)
        vk->CmdPipelineBarrier(cmd_buf, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                               VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                               0, NULL, 0, NULL, bar_num, img_bar);

    /* Schedule a copy for each plane */
    for (int i = 0; i < planes; i++) {
        ImageBuffer *vkbuf = (ImageBuffer *)bufs[i]->data;
        VkBufferImageCopy buf_reg = {
            .bufferOffset = buf_offsets[i],
            .bufferRowLength = buf_stride[i] / desc->comp[i].step,
            .imageSubresource.layerCount = 1,
            .imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .imageOffset = { 0, 0, 0, },
        };

        int p_w, p_h;
        get_plane_wh(&p_w, &p_h, pix_fmt, w, h, i);

        buf_reg.bufferImageHeight = p_h;
        buf_reg.imageExtent = (VkExtent3D){ p_w, p_h, 1, };

        if (to_buf)
            vk->CmdCopyImageToBuffer(cmd_buf, frame->img[i], frame->layout[i],
                                     vkbuf->buf, 1, &buf_reg);
        else
            vk->CmdCopyBufferToImage(cmd_buf, vkbuf->buf, frame->img[i],
                                     frame->layout[i], 1, &buf_reg);
    }

    /* When uploading, do this asynchronously if the source is refcounted by
     * keeping the buffers as a submission dependency.
     * The hwcontext is guaranteed to not be freed until all frames are freed
     * in the frames_uninit function.
     * When downloading to buffer, do this synchronously and wait for the
     * queue submission to finish executing */
    if (!to_buf) {
        int ref;
        for (ref = 0; ref < AV_NUM_DATA_POINTERS; ref++) {
            if (!f->buf[ref])
                break;
            if ((err = add_buf_dep_exec_ctx(hwfc, ectx, &f->buf[ref], 1)))
                return err;
        }
        if (ref && (err = add_buf_dep_exec_ctx(hwfc, ectx, bufs, planes)))
            return err;
        return submit_exec_ctx(hwfc, ectx, &s_info, frame, !ref);
    } else {
        return submit_exec_ctx(hwfc, ectx, &s_info, frame, 1);
    }
}

static int vulkan_transfer_data(AVHWFramesContext *hwfc, const AVFrame *vkf,
                                const AVFrame *swf, int from)
{
    int err = 0;
    VkResult ret;
    AVVkFrame *f = (AVVkFrame *)vkf->data[0];
    AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
    AVVulkanDeviceContext *hwctx = dev_ctx->hwctx;
    VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;
    FFVulkanFunctions *vk = &p->vkfn;

    AVFrame tmp;
    AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
    size_t buf_offsets[AV_NUM_DATA_POINTERS] = { 0 };

    int p_w, p_h;
    const int planes = av_pix_fmt_count_planes(swf->format);

    int host_mapped[AV_NUM_DATA_POINTERS] = { 0 };
    const int map_host = !!(p->extensions & FF_VK_EXT_EXTERNAL_HOST_MEMORY);

    if ((swf->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(swf->format))) {
        av_log(hwfc, AV_LOG_ERROR, "Unsupported software frame pixel format!\n");
        return AVERROR(EINVAL);
    }

    if (swf->width > hwfc->width || swf->height > hwfc->height)
        return AVERROR(EINVAL);

    /* For linear, host visible images */
    if (f->tiling == VK_IMAGE_TILING_LINEAR &&
        f->flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        AVFrame *map = av_frame_alloc();
        if (!map)
            return AVERROR(ENOMEM);
        map->format = swf->format;

        err = vulkan_map_frame_to_mem(hwfc, map, vkf, AV_HWFRAME_MAP_WRITE);
        if (err)
            return err;

        err = av_frame_copy((AVFrame *)(from ? swf : map), from ? map : swf);
        av_frame_free(&map);
        return err;
    }
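
    /* Images that cannot be mapped directly take the staging path below:
     * create one host-visible buffer per plane (importing the frame's own
     * memory when the host-pointer extension allows it), then record a
     * buffer<->image copy on the GPU. */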

    /* Create buffers */
    for (int i = 0; i < planes; i++) {
        size_t req_size;

        VkExternalMemoryBufferCreateInfo create_desc = {
            .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
            .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
        };

        VkImportMemoryHostPointerInfoEXT import_desc = {
            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
        };

        VkMemoryHostPointerPropertiesEXT p_props = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
        };

        get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);

        tmp.linesize[i] = FFABS(swf->linesize[i]);

        /* Do not map images with a negative stride */
        if (map_host && swf->linesize[i] > 0) {
            size_t offs;
            offs = (uintptr_t)swf->data[i] % p->hprops.minImportedHostPointerAlignment;
            import_desc.pHostPointer = swf->data[i] - offs;

            /* We have to compensate for the few extra bytes of padding we
             * completely ignore at the start */
            req_size = FFALIGN(offs + tmp.linesize[i] * p_h,
                               p->hprops.minImportedHostPointerAlignment);
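
            /* e.g. with a 4096-byte minImportedHostPointerAlignment and a
             * plane pointer ending in 0x1234, offs is 0x234, the import
             * starts at the pointer rounded down to 0x1000, and req_size is
             * offs + linesize * height rounded up to the next 4096-byte
             * multiple. */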

            ret = vk->GetMemoryHostPointerPropertiesEXT(hwctx->act_dev,
                                                        import_desc.handleType,
                                                        import_desc.pHostPointer,
                                                        &p_props);

            if (ret == VK_SUCCESS) {
                host_mapped[i] = 1;
                buf_offsets[i] = offs;
            }
        }
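
        /* If the import was refused (e.g. an unsuitable memory type), fall
         * back to an ordinary staging buffer and a CPU copy into it. */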

        if (!host_mapped[i])
            req_size = get_req_buffer_size(p, &tmp.linesize[i], p_h);

        err = create_buf(dev_ctx, &bufs[i],
                         from ? VK_BUFFER_USAGE_TRANSFER_DST_BIT :
                                VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
                         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
                         req_size, p_props.memoryTypeBits, host_mapped[i],
                         host_mapped[i] ? &create_desc : NULL,
                         host_mapped[i] ? &import_desc : NULL);
        if (err)
            goto end;
    }

    if (!from) {
        /* Map, copy image TO buffer (which then goes to the VkImage), unmap */
        if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
            goto end;

        for (int i = 0; i < planes; i++) {
            if (host_mapped[i])
                continue;

            get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);

            av_image_copy_plane(tmp.data[i], tmp.linesize[i],
                                (const uint8_t *)swf->data[i], swf->linesize[i],
                                FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
                                p_h);
        }

        if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
            goto end;
    }

    /* Copy buffers into/from image */
    err = transfer_image_buf(hwfc, vkf, bufs, buf_offsets, tmp.linesize,
                             swf->width, swf->height, swf->format, from);

    if (from) {
        /* Map, copy buffer (which came FROM the VkImage) to the frame, unmap */
        if ((err = map_buffers(dev_ctx, bufs, tmp.data, planes, 0)))
            goto end;

        for (int i = 0; i < planes; i++) {
            if (host_mapped[i])
                continue;

            get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);

            av_image_copy_plane_uc_from(swf->data[i], swf->linesize[i],
                                        (const uint8_t *)tmp.data[i], tmp.linesize[i],
                                        FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
                                        p_h);
        }

        if ((err = unmap_buffers(dev_ctx, bufs, planes, 1)))
            goto end;
    }

end:
    for (int i = 0; i < planes; i++)
        av_buffer_unref(&bufs[i]);

    return err;
}

static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,
                                   const AVFrame *src)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (src->format) {
#if CONFIG_CUDA
    case AV_PIX_FMT_CUDA:
#ifdef _WIN32
        if ((p->extensions & FF_VK_EXT_EXTERNAL_WIN32_MEMORY) &&
            (p->extensions & FF_VK_EXT_EXTERNAL_WIN32_SEM))
#else
        if ((p->extensions & FF_VK_EXT_EXTERNAL_FD_MEMORY) &&
            (p->extensions & FF_VK_EXT_EXTERNAL_FD_SEM))
#endif
            return vulkan_transfer_data_from_cuda(hwfc, dst, src);
#endif
    default:
        if (src->hw_frames_ctx)
            return AVERROR(ENOSYS);
        else
            return vulkan_transfer_data(hwfc, dst, src, 0);
    }
}

#if CONFIG_CUDA
static int vulkan_transfer_data_to_cuda(AVHWFramesContext *hwfc, AVFrame *dst,
                                        const AVFrame *src)
{
    int err;
    CUcontext dummy;
    AVVkFrame *dst_f;
    AVVkFrameInternal *dst_int;
    VulkanFramesPriv *fp = hwfc->internal->priv;
    const int planes = av_pix_fmt_count_planes(hwfc->sw_format);
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(hwfc->sw_format);

    AVHWFramesContext *cuda_fc = (AVHWFramesContext*)dst->hw_frames_ctx->data;
    AVHWDeviceContext *cuda_cu = cuda_fc->device_ctx;
    AVCUDADeviceContext *cuda_dev = cuda_cu->hwctx;
    AVCUDADeviceContextInternal *cu_internal = cuda_dev->internal;
    CudaFunctions *cu = cu_internal->cuda_dl;
    CUDA_EXTERNAL_SEMAPHORE_WAIT_PARAMS s_w_par[AV_NUM_DATA_POINTERS] = { 0 };
    CUDA_EXTERNAL_SEMAPHORE_SIGNAL_PARAMS s_s_par[AV_NUM_DATA_POINTERS] = { 0 };

    dst_f = (AVVkFrame *)src->data[0];

    err = prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_EXPORT);
    if (err < 0)
        return err;

    err = CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
    if (err < 0)
        return err;

    err = vulkan_export_to_cuda(hwfc, dst->hw_frames_ctx, src);
    if (err < 0) {
        CHECK_CU(cu->cuCtxPopCurrent(&dummy));
        return err;
    }

    dst_int = dst_f->internal;

    for (int i = 0; i < planes; i++) {
        s_w_par[i].params.fence.value = dst_f->sem_value[i] + 0;
        s_s_par[i].params.fence.value = dst_f->sem_value[i] + 1;
    }
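
    /* Timeline semaphore handshake: CUDA waits until each plane's semaphore
     * reaches the current sem_value, and will signal sem_value + 1 once its
     * copies complete, so later Vulkan work can wait on that value. */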

    err = CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
                                                     planes, cuda_dev->stream));
    if (err < 0)
        goto fail;

    for (int i = 0; i < planes; i++) {
        CUDA_MEMCPY2D cpy = {
            .dstMemoryType = CU_MEMORYTYPE_DEVICE,
            .dstDevice     = (CUdeviceptr)dst->data[i],
            .dstPitch      = dst->linesize[i],
            .dstY          = 0,

            .srcMemoryType = CU_MEMORYTYPE_ARRAY,
            .srcArray      = dst_int->cu_array[i],
        };

        int w, h;
        get_plane_wh(&w, &h, hwfc->sw_format, hwfc->width, hwfc->height, i);

        cpy.WidthInBytes = w * desc->comp[i].step;
        cpy.Height = h;
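
        /* CUDA 2D copies take a byte width, hence texel width times the
         * bytes-per-texel step from the component descriptor, while Height
         * counts rows. */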

        err = CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
        if (err < 0)
            goto fail;
    }

    err = CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
                                                       planes, cuda_dev->stream));
    if (err < 0)
        goto fail;

    for (int i = 0; i < planes; i++)
        dst_f->sem_value[i]++;
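
    /* Keep the CPU-side mirror of the timeline value in step with what the
     * signal operation queued above will set the semaphores to. */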

    CHECK_CU(cu->cuCtxPopCurrent(&dummy));

    av_log(hwfc, AV_LOG_VERBOSE, "Transferred Vulkan image to CUDA!\n");

    return prepare_frame(hwfc, &fp->upload_ctx, dst_f, PREP_MODE_EXTERNAL_IMPORT);

fail:
    CHECK_CU(cu->cuCtxPopCurrent(&dummy));
    vulkan_free_internal(dst_f);
    dst_f->internal = NULL;
    av_buffer_unref(&dst->buf[0]);
    return err;
}
#endif

static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
                                     const AVFrame *src)
{
    av_unused VulkanDevicePriv *p = hwfc->device_ctx->internal->priv;

    switch (dst->format) {
#if CONFIG_CUDA
    case AV_PIX_FMT_CUDA:
#ifdef _WIN32
        if ((p->extensions & FF_VK_EXT_EXTERNAL_WIN32_MEMORY) &&
            (p->extensions & FF_VK_EXT_EXTERNAL_WIN32_SEM))
#else
        if ((p->extensions & FF_VK_EXT_EXTERNAL_FD_MEMORY) &&
            (p->extensions & FF_VK_EXT_EXTERNAL_FD_SEM))
#endif
            return vulkan_transfer_data_to_cuda(hwfc, dst, src);
#endif
    default:
        if (dst->hw_frames_ctx)
            return AVERROR(ENOSYS);
        else
            return vulkan_transfer_data(hwfc, src, dst, 1);
    }
}

static int vulkan_frames_derive_to(AVHWFramesContext *dst_fc,
                                   AVHWFramesContext *src_fc, int flags)
{
    return vulkan_frames_init(dst_fc);
}
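
/* Deriving a Vulkan frames context from another context just initializes a
 * fresh set of Vulkan frames; per-frame data sharing is presumably left to
 * the map_to/map_from callbacks rather than handled here. */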

AVVkFrame *av_vk_frame_alloc(void)
{
    return av_mallocz(sizeof(AVVkFrame));
}

const HWContextType ff_hwcontext_type_vulkan = {
    .type                   = AV_HWDEVICE_TYPE_VULKAN,
    .name                   = "Vulkan",

    .device_hwctx_size      = sizeof(AVVulkanDeviceContext),
    .device_priv_size       = sizeof(VulkanDevicePriv),
    .frames_hwctx_size      = sizeof(AVVulkanFramesContext),
    .frames_priv_size       = sizeof(VulkanFramesPriv),

    .device_init            = &vulkan_device_init,
    .device_create          = &vulkan_device_create,
    .device_derive          = &vulkan_device_derive,

    .frames_get_constraints = &vulkan_frames_get_constraints,
    .frames_init            = vulkan_frames_init,
    .frames_get_buffer      = vulkan_get_buffer,
    .frames_uninit          = vulkan_frames_uninit,

    .transfer_get_formats   = vulkan_transfer_get_formats,
    .transfer_data_to       = vulkan_transfer_data_to,
    .transfer_data_from     = vulkan_transfer_data_from,

    .map_to                 = vulkan_map_to,
    .map_from               = vulkan_map_from,
    .frames_derive_to       = &vulkan_frames_derive_to,

    .pix_fmts = (const enum AVPixelFormat []) {
        AV_PIX_FMT_VULKAN,
        AV_PIX_FMT_NONE
    },
};