@@ -129,6 +129,9 @@ typedef struct VulkanFramesPriv {
     FFVkExecPool upload_exec;
     FFVkExecPool download_exec;
 
+    /* Temporary buffer pools */
+    AVBufferPool *tmp;
+
     /* Modifier info list to free at uninit */
     VkImageDrmFormatModifierListCreateInfoEXT *modifier_info;
 } VulkanFramesPriv;
@@ -2425,6 +2428,8 @@ static void vulkan_frames_uninit(AVHWFramesContext *hwfc)
     ff_vk_exec_pool_free(&p->vkctx, &fp->compute_exec);
     ff_vk_exec_pool_free(&p->vkctx, &fp->upload_exec);
     ff_vk_exec_pool_free(&p->vkctx, &fp->download_exec);
+
+    av_buffer_pool_uninit(&fp->tmp);
 }
 
 static int vulkan_frames_init(AVHWFramesContext *hwfc)
@@ -3451,245 +3456,387 @@ static int vulkan_map_from(AVHWFramesContext *hwfc, AVFrame *dst,
     return AVERROR(ENOSYS);
 }
 
-static size_t get_req_buffer_size(VulkanDevicePriv *p, int *stride, int height)
-{
-    size_t size;
-    *stride = FFALIGN(*stride, p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
-    size = height*(*stride);
-    size = FFALIGN(size, p->props.properties.limits.minMemoryMapAlignment);
-    return size;
-}
-
-static int transfer_image_buf(AVHWFramesContext *hwfc, AVFrame *f,
-                              AVBufferRef **bufs, size_t *buf_offsets,
-                              const int *buf_stride, int w,
-                              int h, enum AVPixelFormat pix_fmt, int to_buf)
-{
-    int err;
-    AVVkFrame *frame = (AVVkFrame *)f->data[0];
-    VulkanFramesPriv *fp = hwfc->hwctx;
-    VulkanDevicePriv *p = hwfc->device_ctx->hwctx;
-    FFVulkanFunctions *vk = &p->vkctx.vkfn;
-
-    VkImageMemoryBarrier2 img_bar[AV_NUM_DATA_POINTERS];
-    int nb_img_bar = 0;
-
-    const int nb_images = ff_vk_count_images(frame);
-    int pixfmt_planes = av_pix_fmt_count_planes(pix_fmt);
-    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
-
-    VkCommandBuffer cmd_buf;
-    FFVkExecContext *exec = ff_vk_exec_get(to_buf ? &fp->download_exec :
-                                                    &fp->upload_exec);
-    cmd_buf = exec->buf;
-    ff_vk_exec_start(&p->vkctx, exec);
-
-    err = ff_vk_exec_add_dep_buf(&p->vkctx, exec, bufs, pixfmt_planes, 1);
-    if (err < 0)
-        return err;
-
-    err = ff_vk_exec_add_dep_frame(&p->vkctx, exec, f,
-                                   VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
-                                   VK_PIPELINE_STAGE_2_TRANSFER_BIT);
-    if (err < 0)
-        return err;
-
-    ff_vk_frame_barrier(&p->vkctx, exec, f, img_bar, &nb_img_bar,
-                        VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
-                        VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR,
-                        to_buf ? VK_ACCESS_TRANSFER_READ_BIT :
-                                 VK_ACCESS_TRANSFER_WRITE_BIT,
-                        to_buf ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL :
-                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
-                        VK_QUEUE_FAMILY_IGNORED);
-
-    vk->CmdPipelineBarrier2(cmd_buf, &(VkDependencyInfo) {
-            .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
-            .pImageMemoryBarriers = img_bar,
-            .imageMemoryBarrierCount = nb_img_bar,
-        });
-
-    /* Schedule a copy for each plane */
-    for (int i = 0; i < pixfmt_planes; i++) {
-        int idx = FFMIN(i, nb_images - 1);
-        VkImageAspectFlags plane_aspect[] = { VK_IMAGE_ASPECT_COLOR_BIT,
-                                              VK_IMAGE_ASPECT_PLANE_0_BIT,
-                                              VK_IMAGE_ASPECT_PLANE_1_BIT,
-                                              VK_IMAGE_ASPECT_PLANE_2_BIT, };
-
-        FFVkBuffer *vkbuf = (FFVkBuffer *)bufs[i]->data;
-        VkBufferImageCopy buf_reg = {
-            .bufferOffset = buf_offsets[i],
-            .bufferRowLength = buf_stride[i] / desc->comp[i].step,
-            .imageSubresource.layerCount = 1,
-            .imageSubresource.aspectMask = plane_aspect[(pixfmt_planes != nb_images) +
-                                                        i*(pixfmt_planes != nb_images)],
-            .imageOffset = { 0, 0, 0, },
-        };
-
-        uint32_t p_w, p_h;
-        get_plane_wh(&p_w, &p_h, pix_fmt, w, h, i);
-
-        buf_reg.bufferImageHeight = p_h;
-        buf_reg.imageExtent = (VkExtent3D){ p_w, p_h, 1, };
-
-        if (to_buf)
-            vk->CmdCopyImageToBuffer(cmd_buf, frame->img[idx],
-                                     img_bar[0].newLayout,
-                                     vkbuf->buf,
-                                     1, &buf_reg);
-        else
-            vk->CmdCopyBufferToImage(cmd_buf, vkbuf->buf, frame->img[idx],
-                                     img_bar[0].newLayout,
-                                     1, &buf_reg);
-    }
-
-    err = ff_vk_exec_submit(&p->vkctx, exec);
-    if (err < 0)
-        return err;
-
-    ff_vk_exec_wait(&p->vkctx, exec);
-
-    return 0;
-}
-
-static int vulkan_transfer_data(AVHWFramesContext *hwfc, const AVFrame *vkf,
-                                const AVFrame *swf, int from)
-{
-    int err = 0;
-    VkResult ret;
-    AVHWDeviceContext *dev_ctx = hwfc->device_ctx;
-    VulkanDevicePriv *p = dev_ctx->hwctx;
-    FFVulkanFunctions *vk = &p->vkctx.vkfn;
-    AVVulkanDeviceContext *hwctx = &p->p;
-
-    AVFrame tmp;
-    FFVkBuffer *vkbufs[AV_NUM_DATA_POINTERS];
-    AVBufferRef *bufs[AV_NUM_DATA_POINTERS] = { 0 };
-    size_t buf_offsets[AV_NUM_DATA_POINTERS] = { 0 };
-
-    uint32_t p_w, p_h;
-    const int planes = av_pix_fmt_count_planes(swf->format);
-
-    int host_mapped[AV_NUM_DATA_POINTERS] = { 0 };
-    const int map_host = !!(p->vkctx.extensions & FF_VK_EXT_EXTERNAL_HOST_MEMORY);
-
-    if ((swf->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(swf->format))) {
-        av_log(hwfc, AV_LOG_ERROR, "Unsupported software frame pixel format!\n");
-        return AVERROR(EINVAL);
-    }
-
-    if (swf->width > hwfc->width || swf->height > hwfc->height)
-        return AVERROR(EINVAL);
-
-    /* Create buffers */
-    for (int i = 0; i < planes; i++) {
-        size_t req_size;
-
-        VkExternalMemoryBufferCreateInfo create_desc = {
-            .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
-            .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
-        };
-
-        VkImportMemoryHostPointerInfoEXT import_desc = {
-            .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
-            .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
-        };
-
-        VkMemoryHostPointerPropertiesEXT p_props = {
-            .sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
-        };
-
-        get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
-
-        tmp.linesize[i] = FFABS(swf->linesize[i]);
-
-        /* Do not map images with a negative stride */
-        if (map_host && swf->linesize[i] > 0) {
-            size_t offs;
-            offs = (uintptr_t)swf->data[i] % p->hprops.minImportedHostPointerAlignment;
-            import_desc.pHostPointer = swf->data[i] - offs;
-
-            /* We have to compensate for the few extra bytes of padding we
-             * completely ignore at the start */
-            req_size = FFALIGN(offs + tmp.linesize[i] * p_h,
-                               p->hprops.minImportedHostPointerAlignment);
-
-            ret = vk->GetMemoryHostPointerPropertiesEXT(hwctx->act_dev,
-                                                        import_desc.handleType,
-                                                        import_desc.pHostPointer,
-                                                        &p_props);
-            if (ret == VK_SUCCESS && p_props.memoryTypeBits) {
-                host_mapped[i] = 1;
-                buf_offsets[i] = offs;
-            }
-        }
-
-        if (!host_mapped[i])
-            req_size = get_req_buffer_size(p, &tmp.linesize[i], p_h);
-
-        err = ff_vk_create_avbuf(&p->vkctx, &bufs[i], req_size,
-                                 host_mapped[i] ? &create_desc : NULL,
-                                 host_mapped[i] ? &import_desc : NULL,
-                                 from ? VK_BUFFER_USAGE_TRANSFER_DST_BIT :
-                                        VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
-                                 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
-                                 (host_mapped[i] ?
-                                  VK_MEMORY_PROPERTY_HOST_COHERENT_BIT : 0x0));
-        if (err < 0)
-            goto end;
-
-        vkbufs[i] = (FFVkBuffer *)bufs[i]->data;
-    }
-
-    if (!from) {
-        /* Map, copy image TO buffer (which then goes to the VkImage), unmap */
-        if ((err = ff_vk_map_buffers(&p->vkctx, vkbufs, tmp.data, planes, 0)))
-            goto end;
-
-        for (int i = 0; i < planes; i++) {
-            if (host_mapped[i])
-                continue;
-
-            get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
-
-            av_image_copy_plane(tmp.data[i], tmp.linesize[i],
-                                (const uint8_t *)swf->data[i], swf->linesize[i],
-                                FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
-                                p_h);
-        }
-
-        if ((err = ff_vk_unmap_buffers(&p->vkctx, vkbufs, planes, 1)))
-            goto end;
-    }
-
-    /* Copy buffers into/from image */
-    err = transfer_image_buf(hwfc, (AVFrame *)vkf, bufs, buf_offsets,
-                             tmp.linesize, swf->width, swf->height, swf->format,
-                             from);
-
-    if (from) {
-        /* Map, copy buffer (which came FROM the VkImage) to the frame, unmap */
-        if ((err = ff_vk_map_buffers(&p->vkctx, vkbufs, tmp.data, planes, 0)))
-            goto end;
-
-        for (int i = 0; i < planes; i++) {
-            if (host_mapped[i])
-                continue;
-
-            get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
-
-            av_image_copy_plane_uc_from(swf->data[i], swf->linesize[i],
-                                        (const uint8_t *)tmp.data[i], tmp.linesize[i],
-                                        FFMIN(tmp.linesize[i], FFABS(swf->linesize[i])),
-                                        p_h);
-        }
-
-        if ((err = ff_vk_unmap_buffers(&p->vkctx, vkbufs, planes, 1)))
-            goto end;
-    }
-
-end:
-    for (int i = 0; i < planes; i++)
-        av_buffer_unref(&bufs[i]);
-
-    return err;
-}
+static int copy_buffer_data(AVHWFramesContext *hwfc, AVBufferRef *buf,
+                            AVFrame *swf, VkBufferImageCopy *region,
+                            int planes, int upload)
+{
+    VkResult ret;
+    VulkanDevicePriv *p = hwfc->device_ctx->hwctx;
+    FFVulkanFunctions *vk = &p->vkctx.vkfn;
+    AVVulkanDeviceContext *hwctx = &p->p;
+
+    FFVkBuffer *vkbuf = (FFVkBuffer *)buf->data;
+
+    const VkMappedMemoryRange flush_info = {
+        .sType  = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+        .memory = vkbuf->mem,
+        .size   = VK_WHOLE_SIZE,
+    };
+
+    if (!(vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) && !upload) {
+        ret = vk->InvalidateMappedMemoryRanges(hwctx->act_dev, 1,
+                                               &flush_info);
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Failed to invalidate buffer data: %s\n",
+                   ff_vk_ret2str(ret));
+            return AVERROR_EXTERNAL;
+        }
+    }
+
+    for (int i = 0; i < planes; i++)
+        av_image_copy_plane(vkbuf->mapped_mem + region[i].bufferOffset,
+                            region[i].bufferRowLength,
+                            swf->data[i],
+                            swf->linesize[i],
+                            swf->linesize[i],
+                            region[i].imageExtent.height);
+
+    if (!(vkbuf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) && upload) {
+        ret = vk->FlushMappedMemoryRanges(hwctx->act_dev, 1,
+                                          &flush_info);
+        if (ret != VK_SUCCESS) {
+            av_log(hwfc, AV_LOG_ERROR, "Failed to flush buffer data: %s\n",
+                   ff_vk_ret2str(ret));
+            return AVERROR_EXTERNAL;
+        }
+    }
+
+    return 0;
+}
+
+static int get_plane_buf(AVHWFramesContext *hwfc, AVBufferRef **dst,
+                         AVFrame *swf, VkBufferImageCopy *region, int upload)
+{
+    int err;
+    VulkanFramesPriv *fp = hwfc->hwctx;
+    VulkanDevicePriv *p = hwfc->device_ctx->hwctx;
+    const int planes = av_pix_fmt_count_planes(swf->format);
+
+    size_t buf_offset = 0;
+    for (int i = 0; i < planes; i++) {
+        size_t size;
+        ptrdiff_t linesize = swf->linesize[i];
+
+        uint32_t p_w, p_h;
+        get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
+
+        linesize = FFALIGN(linesize,
+                           p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
+        size = p_h*linesize;
+
+        region[i] = (VkBufferImageCopy) {
+            .bufferOffset = buf_offset,
+            .bufferRowLength = linesize,
+            .bufferImageHeight = p_h,
+            .imageSubresource.layerCount = 1,
+            .imageExtent = (VkExtent3D){ p_w, p_h, 1 },
+            /* Rest of the fields adjusted/filled in later */
+        };
+
+        buf_offset = FFALIGN(buf_offset + size,
+                             p->props.properties.limits.optimalBufferCopyOffsetAlignment);
+    }
+
+    err = ff_vk_get_pooled_buffer(&p->vkctx, &fp->tmp, dst,
+                                  VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
+                                  VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+                                  NULL, buf_offset,
+                                  VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+    if (err < 0)
+        return err;
+
+    return 0;
+}
+
+static int create_mapped_buffer(AVHWFramesContext *hwfc,
+                                FFVkBuffer *vkb, VkBufferUsageFlags usage,
+                                size_t size,
+                                VkExternalMemoryBufferCreateInfo *create_desc,
+                                VkImportMemoryHostPointerInfoEXT *import_desc,
+                                VkMemoryHostPointerPropertiesEXT props)
+{
+    int err;
+    VkResult ret;
+    VulkanDevicePriv *p = hwfc->device_ctx->hwctx;
+    FFVulkanFunctions *vk = &p->vkctx.vkfn;
+    AVVulkanDeviceContext *hwctx = &p->p;
+
+    VkBufferCreateInfo buf_spawn = {
+        .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+        .pNext       = create_desc,
+        .usage       = usage,
+        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
+        .size        = size,
+    };
+    VkMemoryRequirements req = {
+        .size           = size,
+        .alignment      = p->hprops.minImportedHostPointerAlignment,
+        .memoryTypeBits = props.memoryTypeBits,
+    };
+
+    err = ff_vk_alloc_mem(&p->vkctx, &req,
+                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+                          import_desc, &vkb->flags, &vkb->mem);
+    if (err < 0)
+        return err;
+
+    ret = vk->CreateBuffer(hwctx->act_dev, &buf_spawn, hwctx->alloc, &vkb->buf);
+    if (ret != VK_SUCCESS) {
+        vk->FreeMemory(hwctx->act_dev, vkb->mem, hwctx->alloc);
+        return AVERROR_EXTERNAL;
+    }
+
+    ret = vk->BindBufferMemory(hwctx->act_dev, vkb->buf, vkb->mem, 0);
+    if (ret != VK_SUCCESS) {
+        vk->FreeMemory(hwctx->act_dev, vkb->mem, hwctx->alloc);
+        vk->DestroyBuffer(hwctx->act_dev, vkb->buf, hwctx->alloc);
+        return AVERROR_EXTERNAL;
+    }
+
+    return 0;
+}
+
+static void destroy_avvkbuf(void *opaque, uint8_t *data)
+{
+    FFVulkanContext *s = opaque;
+    FFVkBuffer *buf = (FFVkBuffer *)data;
+    ff_vk_free_buf(s, buf);
+    av_free(buf);
+}
+
+static int host_map_frame(AVHWFramesContext *hwfc, AVBufferRef **dst, int *nb_bufs,
+                          AVFrame *swf, VkBufferImageCopy *region, int upload)
+{
+    int err;
+    VkResult ret;
+    VulkanDevicePriv *p = hwfc->device_ctx->hwctx;
+    FFVulkanFunctions *vk = &p->vkctx.vkfn;
+    AVVulkanDeviceContext *hwctx = &p->p;
+
+    const int planes = av_pix_fmt_count_planes(swf->format);
+
+    VkExternalMemoryBufferCreateInfo create_desc = {
+        .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+        .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+    };
+    VkImportMemoryHostPointerInfoEXT import_desc = {
+        .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+        .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+    };
+    VkMemoryHostPointerPropertiesEXT props;
+
+    for (int i = 0; i < planes; i++) {
+        FFVkBuffer *vkb;
+        uint32_t p_w, p_h;
+        size_t offs;
+        size_t buffer_size;
+
+        /* We can't host map images with negative strides */
+        if (swf->linesize[i] < 0) {
+            err = AVERROR(EINVAL);
+            goto fail;
+        }
+
+        get_plane_wh(&p_w, &p_h, swf->format, swf->width, swf->height, i);
+
+        /* Get the previous point at which mapping was possible and use it */
+        offs = (uintptr_t)swf->data[i] % p->hprops.minImportedHostPointerAlignment;
+        import_desc.pHostPointer = swf->data[i] - offs;
+
+        props = (VkMemoryHostPointerPropertiesEXT) {
+            VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+        };
+        ret = vk->GetMemoryHostPointerPropertiesEXT(hwctx->act_dev,
+                                                    import_desc.handleType,
+                                                    import_desc.pHostPointer,
+                                                    &props);
+        if (!(ret == VK_SUCCESS && props.memoryTypeBits)) {
+            err = AVERROR(EINVAL);
+            goto fail;
+        }
+
+        /* Buffer region for this plane */
+        region[i] = (VkBufferImageCopy) {
+            .bufferOffset = offs,
+            .bufferRowLength = swf->linesize[i],
+            .bufferImageHeight = p_h,
+            .imageSubresource.layerCount = 1,
+            .imageExtent = (VkExtent3D){ p_w, p_h, 1 },
+            /* Rest of the fields adjusted/filled in later */
+        };
+
+        /* Add the offset at the start, which gets ignored */
+        buffer_size = offs + swf->linesize[i]*p_h;
+        buffer_size = FFALIGN(buffer_size, p->props.properties.limits.minMemoryMapAlignment);
+
+        /* Create a buffer */
+        vkb = av_mallocz(sizeof(*vkb));
+        if (!vkb) {
+            err = AVERROR(ENOMEM);
+            goto fail;
+        }
+
+        err = create_mapped_buffer(hwfc, vkb,
+                                   upload ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT :
+                                            VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+                                   buffer_size, &create_desc, &import_desc,
+                                   props);
+        if (err < 0) {
+            av_free(vkb);
+            goto fail;
+        }
+
+        /* Create a ref */
+        dst[*nb_bufs] = av_buffer_create((uint8_t *)vkb, sizeof(*vkb),
+                                         destroy_avvkbuf, &p->vkctx, 0);
+        if (!dst[*nb_bufs]) {
+            destroy_avvkbuf(&p->vkctx, (uint8_t *)vkb);
+            err = AVERROR(ENOMEM);
+            goto fail;
+        }
+
+        (*nb_bufs)++;
+    }
+
+    return 0;
+
+fail:
+    for (int i = 0; i < (*nb_bufs); i++)
+        av_buffer_unref(&dst[i]);
+    return err;
+}
+
+static int vulkan_transfer_frame(AVHWFramesContext *hwfc,
+                                 AVFrame *swf, AVFrame *hwf,
+                                 int upload)
+{
+    int err;
+    VulkanFramesPriv *fp = hwfc->hwctx;
+    VulkanDevicePriv *p = hwfc->device_ctx->hwctx;
+    FFVulkanFunctions *vk = &p->vkctx.vkfn;
+
+    int host_mapped = 0;
+
+    AVVkFrame *hwf_vk = (AVVkFrame *)hwf->data[0];
+    VkBufferImageCopy region[AV_NUM_DATA_POINTERS]; // always one per plane
+    const int planes = av_pix_fmt_count_planes(swf->format);
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(swf->format);
+    const int nb_images = ff_vk_count_images(hwf_vk);
+    static const VkImageAspectFlags plane_aspect[] = { VK_IMAGE_ASPECT_COLOR_BIT,
+                                                       VK_IMAGE_ASPECT_PLANE_0_BIT,
+                                                       VK_IMAGE_ASPECT_PLANE_1_BIT,
+                                                       VK_IMAGE_ASPECT_PLANE_2_BIT, };
+
+    VkImageMemoryBarrier2 img_bar[AV_NUM_DATA_POINTERS];
+    int nb_img_bar = 0;
+
+    AVBufferRef *bufs[AV_NUM_DATA_POINTERS];
+    int nb_bufs = 0;
+
+    VkCommandBuffer cmd_buf;
+    FFVkExecContext *exec;
+
+    /* Sanity checking */
+    if ((swf->format != AV_PIX_FMT_NONE && !av_vkfmt_from_pixfmt(swf->format))) {
+        av_log(hwfc, AV_LOG_ERROR, "Unsupported software frame pixel format!\n");
+        return AVERROR(EINVAL);
+    }
+
+    if (swf->width > hwfc->width || swf->height > hwfc->height)
+        return AVERROR(EINVAL);
+
+    /* Setup buffers first */
+    if (p->vkctx.extensions & FF_VK_EXT_EXTERNAL_HOST_MEMORY) {
+        err = host_map_frame(hwfc, bufs, &nb_bufs, swf, region, upload);
+        if (err >= 0)
+            host_mapped = 1;
+    }
+
+    if (!host_mapped) {
+        err = get_plane_buf(hwfc, &bufs[0], swf, region, upload);
+        if (err < 0)
+            goto end;
+        nb_bufs = 1;
+
+        if (upload) {
+            err = copy_buffer_data(hwfc, bufs[0], swf, region, planes, 1);
+            if (err < 0)
+                goto end;
+        }
+    }
+
+    exec = ff_vk_exec_get(&fp->upload_exec);
+    cmd_buf = exec->buf;
+
+    ff_vk_exec_start(&p->vkctx, exec);
+
+    /* Prep destination Vulkan frame */
+    err = ff_vk_exec_add_dep_frame(&p->vkctx, exec, hwf,
+                                   VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
+                                   VK_PIPELINE_STAGE_2_TRANSFER_BIT);
+    if (err < 0)
+        goto end;
+
+    /* No need to declare buf deps for synchronous transfers */
+    if (upload) {
+        err = ff_vk_exec_add_dep_buf(&p->vkctx, exec, bufs, nb_bufs, 1);
+        if (err < 0) {
+            ff_vk_exec_discard_deps(&p->vkctx, exec);
+            goto end;
+        }
+    }
+
+    ff_vk_frame_barrier(&p->vkctx, exec, hwf, img_bar, &nb_img_bar,
+                        VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
+                        VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR,
+                        upload ? VK_ACCESS_TRANSFER_WRITE_BIT :
+                                 VK_ACCESS_TRANSFER_READ_BIT,
+                        upload ? VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL :
+                                 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+                        VK_QUEUE_FAMILY_IGNORED);
+
+    vk->CmdPipelineBarrier2(cmd_buf, &(VkDependencyInfo) {
+            .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+            .pImageMemoryBarriers = img_bar,
+            .imageMemoryBarrierCount = nb_img_bar,
+        });
+
+    for (int i = 0; i < planes; i++) {
+        int buf_idx = FFMIN(i, (nb_bufs - 1));
+        int img_idx = FFMIN(i, (nb_images - 1));
+        FFVkBuffer *vkbuf = (FFVkBuffer *)bufs[buf_idx]->data;
+
+        uint32_t orig_stride = region[i].bufferRowLength;
+        region[i].bufferRowLength /= desc->comp[i].step;
+        region[i].imageSubresource.aspectMask = plane_aspect[(planes != nb_images) +
+                                                             i*(planes != nb_images)];
+
+        if (upload)
+            vk->CmdCopyBufferToImage(cmd_buf, vkbuf->buf,
+                                     hwf_vk->img[img_idx],
+                                     img_bar[img_idx].newLayout,
+                                     1, &region[i]);
+        else
+            vk->CmdCopyImageToBuffer(cmd_buf, hwf_vk->img[img_idx],
+                                     img_bar[img_idx].newLayout,
+                                     vkbuf->buf,
+                                     1, &region[i]);
+
+        region[i].bufferRowLength = orig_stride;
+    }
+
+    err = ff_vk_exec_submit(&p->vkctx, exec);
+    if (err < 0) {
+        ff_vk_exec_discard_deps(&p->vkctx, exec);
+    } else if (!upload) {
+        ff_vk_exec_wait(&p->vkctx, exec);
+        if (!host_mapped)
+            err = copy_buffer_data(hwfc, bufs[0], swf, region, planes, 0);
+    }
+
+end:
+    for (int i = 0; i < nb_bufs; i++)
+        av_buffer_unref(&bufs[i]);
+
+    return err;
+}
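
Note: the fallback path in get_plane_buf() above packs all planes into one pooled staging buffer, aligning each row to optimalBufferCopyRowPitchAlignment and each plane's start to optimalBufferCopyOffsetAlignment. Below is a minimal standalone sketch (not part of the patch) of that arithmetic for a 1920x1080 NV12 frame; the alignment values are assumed for illustration, the real ones come from VkPhysicalDeviceLimits.

/* Standalone sketch: mirrors the packing logic of get_plane_buf() above. */
#include <stdio.h>
#include <stddef.h>

#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    /* Assumed device limits, for illustration only */
    const size_t row_pitch_align = 64; /* optimalBufferCopyRowPitchAlignment */
    const size_t offset_align    = 64; /* optimalBufferCopyOffsetAlignment */

    /* NV12: plane 0 is 1920x1080 luma, plane 1 is 1920x540 interleaved chroma */
    const size_t linesize[2] = { 1920, 1920 };
    const size_t height[2]   = { 1080, 540 };

    size_t buf_offset = 0;
    for (int i = 0; i < 2; i++) {
        size_t stride = ALIGN(linesize[i], row_pitch_align);
        size_t size   = stride * height[i];
        printf("plane %d: offset %zu, row length %zu, size %zu\n",
               i, buf_offset, stride, size);
        buf_offset = ALIGN(buf_offset + size, offset_align);
    }

    /* This total is what gets requested from the fp->tmp buffer pool */
    printf("total staging buffer size: %zu\n", buf_offset);
    return 0;
}

For the example frame this yields 2073600 bytes for the luma plane, 1036800 bytes for the chroma plane, and a 3110400-byte pooled buffer request.
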
@@ -3716,7 +3863,7 @@ static int vulkan_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst,
         if (src->hw_frames_ctx)
             return AVERROR(ENOSYS);
         else
-            return vulkan_transfer_data(hwfc, dst, src, 0);
+            return vulkan_transfer_frame(hwfc, (AVFrame *)src, dst, 1);
     }
 }
@@ -3833,7 +3980,7 @@ static int vulkan_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst,
         if (dst->hw_frames_ctx)
             return AVERROR(ENOSYS);
         else
-            return vulkan_transfer_data(hwfc, src, dst, 1);
+            return vulkan_transfer_frame(hwfc, dst, (AVFrame *)src, 0);
     }
 }
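
For context, both of these entry points are reached through the public AVHWFramesContext transfer API. Below is a minimal, illustrative sketch (not part of the patch) of how an application would exercise the new upload path; error handling is trimmed and the format and dimensions are placeholder values.

#include <libavutil/hwcontext.h>
#include <libavutil/frame.h>
#include <libavutil/error.h>

static int upload_one_frame(void)
{
    int err;
    AVBufferRef *dev_ref = NULL, *frames_ref = NULL;
    AVHWFramesContext *frames;
    AVFrame *sw = av_frame_alloc(), *hw = av_frame_alloc();

    if (!sw || !hw) {
        err = AVERROR(ENOMEM);
        goto end;
    }

    /* Create a Vulkan device and a frames context backed by it */
    err = av_hwdevice_ctx_create(&dev_ref, AV_HWDEVICE_TYPE_VULKAN, NULL, NULL, 0);
    if (err < 0)
        goto end;

    frames_ref = av_hwframe_ctx_alloc(dev_ref);
    if (!frames_ref) {
        err = AVERROR(ENOMEM);
        goto end;
    }
    frames = (AVHWFramesContext *)frames_ref->data;
    frames->format    = AV_PIX_FMT_VULKAN;
    frames->sw_format = AV_PIX_FMT_NV12;   /* placeholder format */
    frames->width     = 1920;              /* placeholder dimensions */
    frames->height    = 1080;
    err = av_hwframe_ctx_init(frames_ref);
    if (err < 0)
        goto end;

    /* Software source frame */
    sw->format = AV_PIX_FMT_NV12;
    sw->width  = 1920;
    sw->height = 1080;
    err = av_frame_get_buffer(sw, 0);
    if (err < 0)
        goto end;

    /* Destination Vulkan frame; the transfer should end up in
     * vulkan_transfer_data_to() and thus vulkan_transfer_frame(..., upload=1) */
    err = av_hwframe_get_buffer(frames_ref, hw, 0);
    if (err < 0)
        goto end;
    err = av_hwframe_transfer_data(hw, sw, 0);

end:
    av_frame_free(&sw);
    av_frame_free(&hw);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&dev_ref);
    return err;
}

The download direction is symmetric: av_hwframe_transfer_data(sw, hw, 0) lands in vulkan_transfer_data_from() and thus vulkan_transfer_frame(..., upload=0).
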