#if CONFIG_SHADER_COMPRESSION

    .r = VK_COMPONENT_SWIZZLE_IDENTITY,
    .g = VK_COMPONENT_SWIZZLE_IDENTITY,
    .b = VK_COMPONENT_SWIZZLE_IDENTITY,
    .a = VK_COMPONENT_SWIZZLE_IDENTITY,
#define CASE(VAL) case VAL: return #VAL

    CASE(VK_ERROR_OUT_OF_HOST_MEMORY);
    CASE(VK_ERROR_OUT_OF_DEVICE_MEMORY);
    CASE(VK_ERROR_INITIALIZATION_FAILED);
    CASE(VK_ERROR_DEVICE_LOST);
    CASE(VK_ERROR_MEMORY_MAP_FAILED);
    CASE(VK_ERROR_LAYER_NOT_PRESENT);
    CASE(VK_ERROR_EXTENSION_NOT_PRESENT);
    CASE(VK_ERROR_FEATURE_NOT_PRESENT);
    CASE(VK_ERROR_INCOMPATIBLE_DRIVER);
    CASE(VK_ERROR_TOO_MANY_OBJECTS);
    CASE(VK_ERROR_FORMAT_NOT_SUPPORTED);
    CASE(VK_ERROR_FRAGMENTED_POOL);
    CASE(VK_ERROR_UNKNOWN);
    CASE(VK_ERROR_OUT_OF_POOL_MEMORY);
    CASE(VK_ERROR_INVALID_EXTERNAL_HANDLE);
    CASE(VK_ERROR_FRAGMENTATION);
    CASE(VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS);
    CASE(VK_PIPELINE_COMPILE_REQUIRED);
    CASE(VK_ERROR_SURFACE_LOST_KHR);
    CASE(VK_ERROR_NATIVE_WINDOW_IN_USE_KHR);
    CASE(VK_SUBOPTIMAL_KHR);
    CASE(VK_ERROR_OUT_OF_DATE_KHR);
    CASE(VK_ERROR_INCOMPATIBLE_DISPLAY_KHR);
    CASE(VK_ERROR_VALIDATION_FAILED_EXT);
    CASE(VK_ERROR_INVALID_SHADER_NV);
    CASE(VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR);
    CASE(VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR);
    CASE(VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR);
    CASE(VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR);
    CASE(VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR);
    CASE(VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT);
    CASE(VK_ERROR_NOT_PERMITTED_KHR);
    CASE(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT);
    CASE(VK_THREAD_IDLE_KHR);
    CASE(VK_THREAD_DONE_KHR);
    CASE(VK_OPERATION_DEFERRED_KHR);
    CASE(VK_OPERATION_NOT_DEFERRED_KHR);
    default: return "Unknown error";
#define FN_MAP_TO(dst_t, dst_name, src_t, src_name)                     \
dst_t ff_vk_map_ ##src_name## _to_ ##dst_name(src_t src)                \
{                                                                       \
    dst_t dst = 0x0;                                                    \
    MAP_TO(VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT,                       \
           VK_IMAGE_USAGE_SAMPLED_BIT);                                 \
    MAP_TO(VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT,                        \
           VK_IMAGE_USAGE_TRANSFER_SRC_BIT);                            \
    MAP_TO(VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT,                        \
           VK_IMAGE_USAGE_TRANSFER_DST_BIT);                            \
    MAP_TO(VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT,                       \
           VK_IMAGE_USAGE_STORAGE_BIT);                                 \
    MAP_TO(VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT,                    \
           VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT);                        \
    MAP_TO(VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR,             \
           VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR);                    \
    MAP_TO(VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR,                \
           VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR);                    \
    MAP_TO(VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR,                \
           VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR);                    \
    MAP_TO(VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR,              \
           VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR);                    \
    MAP_TO(VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT,             \
           VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT);                       \
    return dst;                                                         \
}
#define MAP_TO(flag1, flag2) if (src & flag2) dst |= flag1;
FN_MAP_TO(VkFormatFeatureFlagBits2, feats, VkImageUsageFlags, usage)
#undef MAP_TO
#define MAP_TO(flag1, flag2) if (src & flag1) dst |= flag2;
FN_MAP_TO(VkImageUsageFlags, usage, VkFormatFeatureFlagBits2, feats)
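/* The two expansions above generate a pair of converters that walk the same
 * MAP_TO() table in opposite directions: ff_vk_map_usage_to_feats() and
 * ff_vk_map_feats_to_usage(). Illustrative use (names as generated above):
 *
 *     VkFormatFeatureFlagBits2 feats =
 *         ff_vk_map_usage_to_feats(VK_IMAGE_USAGE_SAMPLED_BIT |
 *                                  VK_IMAGE_USAGE_STORAGE_BIT);
 */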
    /* Collect the enabled queue families, skipping duplicate indices */
    for (int i = 0; i < s->hwctx->nb_qf; i++) {
        int skip = 0;
        for (int j = 0; j < s->nb_qfs; j++) {
            if (s->qfs[j] == s->hwctx->qf[i].idx) {
                skip = 1;
                break;
            }
        }
        if (!skip)
            s->qfs[s->nb_qfs++] = s->hwctx->qf[i].idx;
    }
    s->props = (VkPhysicalDeviceProperties2) {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,

                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT);
#ifdef VK_EXT_shader_long_vector
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_LONG_VECTOR_PROPERTIES_EXT);
#endif

    s->feats = (VkPhysicalDeviceFeatures2) {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,

                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES);
                     VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT);
    s->host_image_copy_layouts = av_malloc(sizeof(*s->host_image_copy_layouts)*1024);
    s->host_image_props.pCopySrcLayouts = s->host_image_copy_layouts;
    s->host_image_props.copySrcLayoutCount = 512;
    s->host_image_props.pCopyDstLayouts = s->host_image_copy_layouts + 512;
    s->host_image_props.copyDstLayoutCount = 512;

    vk->GetPhysicalDeviceProperties2(s->hwctx->phys_dev, &s->props);

    /* The preallocated layout arrays may have been too small: query the real
     * counts, grow the array, and query once more. */
    if (s->host_image_props.copySrcLayoutCount == 512 ||
        s->host_image_props.copyDstLayoutCount == 512) {
        VkImageLayout *new_array;

        s->host_image_props.pCopySrcLayouts =
        s->host_image_props.pCopyDstLayouts = NULL;
        s->host_image_props.copySrcLayoutCount =
        s->host_image_props.copyDstLayoutCount = 0;
        vk->GetPhysicalDeviceProperties2(s->hwctx->phys_dev, &s->props);

        new_size = s->host_image_props.copySrcLayoutCount +
                   s->host_image_props.copyDstLayoutCount;
        new_size *= sizeof(*s->host_image_copy_layouts);
        new_array = av_realloc(s->host_image_copy_layouts, new_size);

        s->host_image_copy_layouts = new_array;
        s->host_image_props.pCopySrcLayouts = new_array;
        s->host_image_props.pCopyDstLayouts = new_array +
                                              s->host_image_props.copySrcLayoutCount;
        vk->GetPhysicalDeviceProperties2(s->hwctx->phys_dev, &s->props);
    vk->GetPhysicalDeviceMemoryProperties(s->hwctx->phys_dev, &s->mprops);
    vk->GetPhysicalDeviceFeatures2(s->hwctx->phys_dev, &s->feats);

    /* Note whether any memory type is host-cached */
    for (int i = 0; i < s->mprops.memoryTypeCount; i++)
        s->host_cached_flag |= s->mprops.memoryTypes[i].propertyFlags &
                               VK_MEMORY_PROPERTY_HOST_CACHED_BIT;

    vk->GetPhysicalDeviceQueueFamilyProperties2(s->hwctx->phys_dev,
                                                &s->tot_nb_qfs, NULL);

    s->qf_props = av_calloc(s->tot_nb_qfs, sizeof(*s->qf_props));

    s->query_props = av_calloc(s->tot_nb_qfs, sizeof(*s->query_props));

    s->video_props = av_calloc(s->tot_nb_qfs, sizeof(*s->video_props));
    if (!s->video_props) {

    for (uint32_t i = 0; i < s->tot_nb_qfs; i++) {
        s->qf_props[i] = (VkQueueFamilyProperties2) {
            .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,

                VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR);
                VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR);

    vk->GetPhysicalDeviceQueueFamilyProperties2(s->hwctx->phys_dev,
                                                &s->tot_nb_qfs, s->qf_props);
    vk->GetPhysicalDeviceCooperativeMatrixPropertiesKHR(s->hwctx->phys_dev,
                                                        &s->coop_mat_props_nb, NULL);

    if (s->coop_mat_props_nb) {
                                  sizeof(VkCooperativeMatrixPropertiesKHR));
        for (int i = 0; i < s->coop_mat_props_nb; i++) {
            s->coop_mat_props[i] = (VkCooperativeMatrixPropertiesKHR) {
                .sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR,

        vk->GetPhysicalDeviceCooperativeMatrixPropertiesKHR(s->hwctx->phys_dev,
                                                            &s->coop_mat_props_nb,
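/* This follows the standard Vulkan two-call enumeration idiom: a first call
 * with a NULL output array yields the element count, the caller allocates,
 * and a second call fills the array. Note that for structure arrays such as
 * this one, each element's sType must be initialized before the second call,
 * as the loop above does. */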
                       VkQueueFlagBits dev_family,
                       VkVideoCodecOperationFlagBitsKHR vid_ops)
{
    for (int i = 0; i < s->hwctx->nb_qf; i++) {
        if ((s->hwctx->qf[i].flags & dev_family) &&
            (s->hwctx->qf[i].video_caps & vid_ops) == vid_ops) {
            return &s->hwctx->qf[i];
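/* Illustrative lookup (assuming this helper is ff_vk_qf_find(), as declared
 * in FFmpeg's vulkan.h): pick a queue family capable of H.264 decoding:
 *
 *     AVVulkanDeviceQueueFamily *qf =
 *         ff_vk_qf_find(s, VK_QUEUE_VIDEO_DECODE_BIT_KHR,
 *                       VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR);
 *     if (!qf)
 *         return AVERROR(ENOTSUP);
 */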
    vk->WaitForFences(s->hwctx->act_dev, 1, &e->fence, VK_TRUE, UINT64_MAX);
    vk->DestroyFence(s->hwctx->act_dev, e->fence, s->hwctx->alloc);

    vk->DestroyDescriptorPool(s->hwctx->act_dev, sd->desc_pool,
                              s->hwctx->alloc);

    vk->DestroyCommandPool(s->hwctx->act_dev, pool->cmd_buf_pools[i],
                           s->hwctx->alloc);
    vk->DestroyQueryPool(s->hwctx->act_dev, pool->query_pool,
                         s->hwctx->alloc);
                         int nb_queries, VkQueryType query_type, int query_64bit,
                         const void *query_create_pnext)

    VkCommandBufferAllocateInfo cbuf_create;
    const VkQueryPoolVideoEncodeFeedbackCreateInfoKHR *ef = NULL;

    if (query_type == VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR) {
            VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR);

    for (int i = 0; i < nb_contexts; i++) {
            .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
            .flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT |
                     VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
            .queueFamilyIndex = qf->idx,

        if (ret != VK_SUCCESS) {

        cbuf_create = (VkCommandBufferAllocateInfo) {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = 1,

        ret = vk->AllocateCommandBuffers(s->hwctx->act_dev, &cbuf_create,

        if (ret != VK_SUCCESS) {

    VkQueryPoolCreateInfo query_pool_info = {
        .sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
        .pNext = query_create_pnext,
        .queryType = query_type,
        .queryCount = nb_queries*nb_contexts,

    ret = vk->CreateQueryPool(s->hwctx->act_dev, &query_pool_info,

    if (ret != VK_SUCCESS) {

    if (query_type == VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR) {
        int nb_results = av_popcount(ef->encodeFeedbackFlags);
    } else if (query_type == VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR) {

    VkFenceCreateInfo fence_create = {
        .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        .flags = VK_FENCE_CREATE_SIGNALED_BIT,

    ret = vk->CreateFence(s->hwctx->act_dev, &fence_create, s->hwctx->alloc,

    if (ret != VK_SUCCESS) {

    vk->GetDeviceQueue(s->hwctx->act_dev, qf->idx, e->qi, &e->queue);
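/* Typical pool setup from a filter or codec (illustrative sketch, assuming
 * the ff_vk_exec_pool_init() signature from FFmpeg's vulkan.h):
 *
 *     RET(ff_vk_exec_pool_init(vkctx, qf, &s->e, qf->num,
 *                              0, 0, 0, NULL));   // no query pool needed
 */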
                          void **data, VkQueryResultFlagBits flags)

    VkQueryResultFlags qf = flags & ~(VK_QUERY_RESULT_64_BIT |
                                      VK_QUERY_RESULT_WITH_STATUS_BIT_KHR);

        return VK_INCOMPLETE;

              VK_QUERY_RESULT_64_BIT : 0x0;
              VK_QUERY_RESULT_WITH_STATUS_BIT_KHR : 0x0;

    return vk->GetQueryPoolResults(s->hwctx->act_dev, pool->query_pool,
    vk->WaitForFences(s->hwctx->act_dev, 1, &e->fence, VK_TRUE, UINT64_MAX);

    VkCommandBufferBeginInfo cmd_start = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,

    /* Wait for the previous submission to finish, then recycle the fence */
    vk->WaitForFences(s->hwctx->act_dev, 1, &e->fence, VK_TRUE, UINT64_MAX);
    vk->ResetFences(s->hwctx->act_dev, 1, &e->fence);

    ret = vk->BeginCommandBuffer(e->buf, &cmd_start);
    if (ret != VK_SUCCESS) {
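/* Sketch of the execution-context lifecycle these helpers implement (names
 * assumed from FFmpeg's vulkan.h):
 *
 *     FFVkExecContext *exec = ff_vk_exec_get(vkctx, &s->e);
 *     ff_vk_exec_start(vkctx, exec);
 *     // ... record commands into exec->buf, register frame/buffer deps ...
 *     err = ff_vk_exec_submit(vkctx, exec);
 *     // submission is asynchronous; ff_vk_exec_wait() blocks on the fence
 */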
    vkfc->unlock_frame(hwfc, vkf);

    for (int i = 0; i < nb_deps; i++) {

#define ARR_REALLOC(str, arr, alloc_s, cnt)                               \
        arr = av_fast_realloc(str->arr, alloc_s, (cnt + 1)*sizeof(*arr)); \
        if (!arr) {                                                       \
            ff_vk_exec_discard_deps(s, e);                                \
            return AVERROR(ENOMEM);                                       \
        }                                                                 \
    vk->DestroySemaphore(s->hwctx->act_dev, ts->sem[i], s->hwctx->alloc);

                          VkSemaphore sem, uint64_t val,
                          VkPipelineStageFlagBits2 stage)

        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,

                          VkSemaphore *sem, int nb,
                          VkPipelineStageFlagBits2 stage,

    for (int i = 0; i < nb; i++) {
        VkSemaphoreSubmitInfo *sem_sig;

            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,

    buf_size = sizeof(*ts) + sizeof(VkSemaphore)*nb;

    memcpy(ts->sem, sem, nb*sizeof(*sem));

    for (int i = 0; i < nb; i++) {

    for (int i = 0; i < nb; i++)
        vk->DestroySemaphore(s->hwctx->act_dev, sem[i], s->hwctx->alloc);
                           VkPipelineStageFlagBits2 wait_stage,
                           VkPipelineStageFlagBits2 signal_stage)

    uint8_t *frame_locked;
    uint8_t *frame_update;
    VkImageLayout *layout_dst;
    uint32_t *queue_family_dst;
    VkAccessFlagBits *access_dst;

    vkfc->lock_frame(hwfc, vkf);

    for (int i = 0; i < nb_images; i++) {
        VkSemaphoreSubmitInfo *sem_sig;
        uint64_t **sem_sig_val_dst;

            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
            .semaphore = vkf->sem[i],
            .value = vkf->sem_value[i],
            .stageMask = wait_stage,

            .sType = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
            .semaphore = vkf->sem[i],
            .value = vkf->sem_value[i] + 1,
            .stageMask = signal_stage,
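/* Each frame image carries a timeline semaphore: the submission waits on the
 * semaphore's current value and signals value + 1, so successive users of the
 * same frame are serialized on the GPU without fence round-trips to the CPU. */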
                        VkImageMemoryBarrier2 *bar, uint32_t *nb_img_bar)

                       VkSemaphore *dst, uint64_t *dst_val,

    uint64_t **sem_sig_val_dst;

    VkCommandBufferSubmitInfo cmd_buf_info = (VkCommandBufferSubmitInfo) {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
        .commandBuffer = e->buf,

    VkSubmitInfo2 submit_info = (VkSubmitInfo2) {
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
        .pCommandBufferInfos = &cmd_buf_info,
        .commandBufferInfoCount = 1,
        .pSignalSemaphoreInfos = e->sem_sig,

    ret = vk->EndCommandBuffer(e->buf);
    if (ret != VK_SUCCESS) {

    s->hwctx->lock_queue(s->device, e->qf, e->qi);
    ret = vk->QueueSubmit2(e->queue, 1, &submit_info, e->fence);
    s->hwctx->unlock_queue(s->device, e->qf, e->qi);

    if (ret != VK_SUCCESS) {

    for (int i = 0; i < nb_images; i++) {

    vkfc->unlock_frame(hwfc, vkf);
                        VkMemoryPropertyFlagBits req_flags, void *alloc_extension,
                        VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)

    VkMemoryAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = alloc_extension,

    alloc_info.allocationSize = req->size;

    /* Find a memory type that is usable by the resource and has all the
     * requested property flags */
    for (int i = 0; i < s->mprops.memoryTypeCount; i++) {
        if (!(req->memoryTypeBits & (1 << i)))

        if ((req_flags != UINT32_MAX) &&
            ((s->mprops.memoryTypes[i].propertyFlags & req_flags) != req_flags))

    alloc_info.memoryTypeIndex = index;

    ret = vk->AllocateMemory(s->hwctx->act_dev, &alloc_info,
                             s->hwctx->alloc, mem);
    if (ret != VK_SUCCESS)

    *mem_flags |= s->mprops.memoryTypes[index].propertyFlags;
                     void *pNext, void *alloc_pNext,
                     VkBufferUsageFlags usage, VkMemoryPropertyFlagBits flags)

    VkBufferCreateInfo buf_spawn = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .size = flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT ?
                FFALIGN(size, s->props.properties.limits.minMemoryMapAlignment) :

    VkMemoryAllocateFlagsInfo alloc_flags = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
        .flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,

    VkBufferMemoryRequirementsInfo2 req_desc = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,

    VkMemoryDedicatedAllocateInfo ded_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
        .pNext = alloc_pNext,

    VkMemoryDedicatedRequirements ded_req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,

    VkMemoryRequirements2 req = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,

           "usage: 0x%x, flags: 0x%x\n",

    ret = vk->CreateBuffer(s->hwctx->act_dev, &buf_spawn,
                           s->hwctx->alloc, &buf->buf);
    if (ret != VK_SUCCESS) {

    req_desc.buffer = buf->buf;

    vk->GetBufferMemoryRequirements2(s->hwctx->act_dev, &req_desc, &req);

    use_ded_mem = ded_req.prefersDedicatedAllocation |
                  ded_req.requiresDedicatedAllocation;

        ded_alloc.buffer = buf->buf;
        ded_alloc.pNext = alloc_pNext;
        alloc_pNext = &ded_alloc;

    if (usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT) {
        alloc_flags.pNext = alloc_pNext;
        alloc_pNext = &alloc_flags;

    ret = vk->BindBufferMemory(s->hwctx->act_dev, buf->buf, buf->mem, 0);
    if (ret != VK_SUCCESS) {

    if (usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT) {
        VkBufferDeviceAddressInfo address_info = {
            .sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,

        buf->address = vk->GetBufferDeviceAddress(s->hwctx->act_dev, &address_info);
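/* Illustrative call (assuming this is ff_vk_create_buf() as declared in
 * vulkan.h): create a host-visible staging buffer usable as a copy source:
 *
 *     FFVkBuffer buf;
 *     RET(ff_vk_create_buf(vkctx, &buf, frame_size, NULL, NULL,
 *                          VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
 *                          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
 *                          VK_MEMORY_PROPERTY_HOST_COHERENT_BIT));
 */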
                      int nb_buffers, int invalidate)

    VkMappedMemoryRange inval_list[64];
    int inval_count = 0;

    for (int i = 0; i < nb_buffers; i++) {
        ret = vk->MapMemory(s->hwctx->act_dev, buf[i]->mem, 0,
                            VK_WHOLE_SIZE, 0, &dst);
        if (ret != VK_SUCCESS) {

    for (int i = 0; i < nb_buffers; i++) {
        const VkMappedMemoryRange ival_buf = {
            .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = buf[i]->mem,
            .size = VK_WHOLE_SIZE,

        if (buf[i]->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;

        inval_list[inval_count++] = ival_buf;

    ret = vk->InvalidateMappedMemoryRanges(s->hwctx->act_dev, inval_count,

    if (ret != VK_SUCCESS) {
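/* Host-coherent memory is skipped above on purpose: only non-coherent
 * mappings require an explicit vkInvalidateMappedMemoryRanges() before the
 * CPU can safely read what the GPU wrote. The flush path below mirrors this
 * for CPU writes that the GPU must observe. */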
                           VkDeviceSize offset, VkDeviceSize mem_size,

    if (buf->host_ref || buf->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)

    const VkMappedMemoryRange flush_data = {
        .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,

        ret = vk->FlushMappedMemoryRanges(s->hwctx->act_dev, 1, &flush_data);
        ret = vk->InvalidateMappedMemoryRanges(s->hwctx->act_dev, 1, &flush_data);

    if (ret != VK_SUCCESS) {

    VkMappedMemoryRange flush_list[64];
    int flush_count = 0;

    for (int i = 0; i < nb_buffers; i++) {
        const VkMappedMemoryRange flush_buf = {
            .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = buf[i]->mem,
            .size = VK_WHOLE_SIZE,

        if (buf[i]->flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)
            continue;

        flush_list[flush_count++] = flush_buf;

    ret = vk->FlushMappedMemoryRanges(s->hwctx->act_dev, flush_count,

    if (ret != VK_SUCCESS) {

    for (int i = 0; i < nb_buffers; i++) {
        vk->UnmapMemory(s->hwctx->act_dev, buf[i]->mem);

    if (!buf || !s->hwctx)

    if (buf->buf != VK_NULL_HANDLE)
        vk->DestroyBuffer(s->hwctx->act_dev, buf->buf, s->hwctx->alloc);
    if (buf->mem != VK_NULL_HANDLE)
        vk->FreeMemory(s->hwctx->act_dev, buf->mem, s->hwctx->alloc);

    buf->buf = VK_NULL_HANDLE;
    buf->mem = VK_NULL_HANDLE;
                            void *create_pNext, size_t size,
                            VkMemoryPropertyFlagBits mem_props)

    if (mem_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {

                                VkExternalMemoryBufferCreateInfo *create_desc,
                                VkImportMemoryHostPointerInfoEXT *import_desc,
                                VkMemoryHostPointerPropertiesEXT props)

    VkBufferCreateInfo buf_spawn = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext = create_desc,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,

    VkMemoryRequirements req = {
        .alignment = s->hprops.minImportedHostPointerAlignment,
        .memoryTypeBits = props.memoryTypeBits,

                      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
                      import_desc, &vkb->flags, &vkb->mem);

    ret = vk->CreateBuffer(s->hwctx->act_dev, &buf_spawn,
                           s->hwctx->alloc, &vkb->buf);
    if (ret != VK_SUCCESS) {
        vk->FreeMemory(s->hwctx->act_dev, vkb->mem, s->hwctx->alloc);

    ret = vk->BindBufferMemory(s->hwctx->act_dev, vkb->buf, vkb->mem, 0);
    if (ret != VK_SUCCESS) {
        vk->FreeMemory(s->hwctx->act_dev, vkb->mem, s->hwctx->alloc);
        vk->DestroyBuffer(s->hwctx->act_dev, vkb->buf, s->hwctx->alloc);
                              VkBufferUsageFlags usage)

    VkExternalMemoryBufferCreateInfo create_desc = {
        .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
        .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,

    VkMemoryAllocateFlagsInfo alloc_flags = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
        .flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,

    VkImportMemoryHostPointerInfoEXT import_desc = {
        .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
        .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
        .pNext = usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT ? &alloc_flags : NULL,

    VkMemoryHostPointerPropertiesEXT props;

    /* Align the imported pointer down to the minimum host-pointer alignment */
    offs = (uintptr_t)src_data % s->hprops.minImportedHostPointerAlignment;
    import_desc.pHostPointer = src_data - offs;

    props = (VkMemoryHostPointerPropertiesEXT) {
        VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,

    ret = vk->GetMemoryHostPointerPropertiesEXT(s->hwctx->act_dev,
                                                import_desc.handleType,
                                                import_desc.pHostPointer,

    if (!(ret == VK_SUCCESS && props.memoryTypeBits))

    const ptrdiff_t src_offset = src_data - src_buf->data;
    buffer_size = offs + (src_buf->size - src_offset);
    buffer_size = FFALIGN(buffer_size, s->props.properties.limits.minMemoryMapAlignment);
    buffer_size = FFALIGN(buffer_size, s->hprops.minImportedHostPointerAlignment);

                         buffer_size, &create_desc, &import_desc,

    if (usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT) {
        VkBufferDeviceAddressInfo address_info = {
            .sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,

        vkb->address = vk->GetBufferDeviceAddress(s->hwctx->act_dev, &address_info);

    vkb->size = buffer_size - offs;
    vkb->flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
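/* This import path is zero-copy: the AVBuffer's pages are mapped into the
 * device directly, and offs compensates for the downward alignment so that
 * the usable region of the resulting buffer still begins at src_data. */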
                             VkShaderStageFlagBits stage)

    pc->stageFlags = stage;

                        int unnorm_coords, VkFilter filt)

    VkSamplerCreateInfo sampler_info = {
        .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        .minFilter = sampler_info.magFilter,
        .mipmapMode = unnorm_coords ? VK_SAMPLER_MIPMAP_MODE_NEAREST :
                                      VK_SAMPLER_MIPMAP_MODE_LINEAR,
        .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
        .addressModeV = sampler_info.addressModeU,
        .addressModeW = sampler_info.addressModeU,
        .anisotropyEnable = VK_FALSE,
        .compareOp = VK_COMPARE_OP_NEVER,
        .borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
        .unnormalizedCoordinates = unnorm_coords,

    ret = vk->CreateSampler(s->hwctx->act_dev, &sampler_info,
                            s->hwctx->alloc, sampler);
    if (ret != VK_SUCCESS) {

    static const VkImageAspectFlags plane_aspect[] = { VK_IMAGE_ASPECT_PLANE_0_BIT,
                                                       VK_IMAGE_ASPECT_PLANE_1_BIT,
                                                       VK_IMAGE_ASPECT_PLANE_2_BIT, };

        return VK_IMAGE_ASPECT_COLOR_BIT;

    return plane_aspect[p];
    /* Invert the permutation stored in lut */
    int lut_tmp[4] = { lut[0], lut[1], lut[2], lut[3] };
    for (int i = 0; i < 4; i++)
        lut[lut_tmp[i]] = i;

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];

        const char *rep_tab[] = {
        return rep_tab[rep_fmt];
    vk->DestroyImageView(s->hwctx->act_dev, iv->views[i], s->hwctx->alloc);

#define REPS_FMT(fmt)                          \
    [FF_VK_REP_NATIVE] = fmt ## _UINT,         \
    [FF_VK_REP_FLOAT]  = fmt ## _UNORM,        \
    [FF_VK_REP_INT]    = fmt ## _SINT,         \
    [FF_VK_REP_UINT]   = fmt ## _UINT,

#define REPS_FMT_PACK(fmt, num)                         \
    [FF_VK_REP_NATIVE] = fmt ## _UINT_PACK ## num,      \
    [FF_VK_REP_FLOAT]  = fmt ## _UNORM_PACK ## num,     \
    [FF_VK_REP_INT]    = fmt ## _SINT_PACK ## num,      \
    [FF_VK_REP_UINT]   = fmt ## _UINT_PACK ## num,

        VK_FORMAT_B5G6R5_UNORM_PACK16,
        VK_FORMAT_B5G6R5_UNORM_PACK16,
        VK_FORMAT_UNDEFINED,
        VK_FORMAT_UNDEFINED,

        VK_FORMAT_R5G6B5_UNORM_PACK16,
        VK_FORMAT_R5G6B5_UNORM_PACK16,
        VK_FORMAT_UNDEFINED,
        VK_FORMAT_UNDEFINED,

    { REPS_FMT(VK_FORMAT_R16G16B16A16) },

        VK_FORMAT_R32_SFLOAT,

        VK_FORMAT_R32G32B32_SFLOAT,
        VK_FORMAT_R32G32B32_SFLOAT,
        VK_FORMAT_UNDEFINED,
        VK_FORMAT_UNDEFINED,

        VK_FORMAT_R32G32B32A32_SFLOAT,
        VK_FORMAT_R32G32B32A32_SFLOAT,
        VK_FORMAT_UNDEFINED,
        VK_FORMAT_UNDEFINED,

        VK_FORMAT_R32G32B32_UINT,
        VK_FORMAT_UNDEFINED,
        VK_FORMAT_R32G32B32_SINT,
        VK_FORMAT_R32G32B32_UINT,

        VK_FORMAT_R32G32B32A32_UINT,
        VK_FORMAT_UNDEFINED,
        VK_FORMAT_R32G32B32A32_SINT,
        VK_FORMAT_R32G32B32A32_UINT,

#undef REPS_FMT_PACK

    if (fmt == VK_FORMAT_UNDEFINED)
        return VK_FORMAT_UNDEFINED;

            return fmts_map[i][rep_fmt];

    return VK_FORMAT_UNDEFINED;
                           VkImageView *img_view, VkImageAspectFlags *aspect,

    VkImageViewUsageCreateInfo view_usage_info = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
        .usage = vkfc->usage &
                 (~(VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR |
                    VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR)),

    VkImageViewCreateInfo view_create_info = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .pNext = &view_usage_info,
        .image = vkf->img[FFMIN(plane, nb_images - 1)],
        .viewType = VK_IMAGE_VIEW_TYPE_2D,
        .subresourceRange = {

    if (view_create_info.format == VK_FORMAT_UNDEFINED) {
               "of format %i and mode %i\n",
               rep_fmts[plane], rep_fmt);

    ret = vk->CreateImageView(s->hwctx->act_dev, &view_create_info,
                              s->hwctx->alloc, img_view);
    if (ret != VK_SUCCESS) {

    *aspect = view_create_info.subresourceRange.aspectMask;

    const size_t buf_size = sizeof(*iv) + nb_planes*sizeof(VkImageView);

    for (int i = 0; i < nb_planes; i++) {
        VkImageViewUsageCreateInfo view_usage_info = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
            .usage = vkfc->usage &
                     (~(VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR |
                        VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR)),

        VkImageViewCreateInfo view_create_info = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            .pNext = &view_usage_info,
            .image = vkf->img[FFMIN(i, nb_images - 1)],
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .subresourceRange = {

        if (view_create_info.format == VK_FORMAT_UNDEFINED) {
                   "of format %i and mode %i\n",
                   rep_fmts[i], rep_fmt);

        ret = vk->CreateImageView(s->hwctx->act_dev, &view_create_info,
                                  s->hwctx->alloc, &iv->views[i]);
        if (ret != VK_SUCCESS) {

    memcpy(views, iv->views, nb_planes*sizeof(*views));

    for (int i = 0; i < iv->nb_views; i++)
        vk->DestroyImageView(s->hwctx->act_dev, iv->views[i], s->hwctx->alloc);
                         AVFrame *pic, VkImageMemoryBarrier2 *bar, int *nb_bar,
                         VkPipelineStageFlags2 src_stage,
                         VkPipelineStageFlags2 dst_stage,
                         VkAccessFlagBits2 new_access,
                         VkImageLayout new_layout,

    for (int i = 0; i < nb_images; i++) {
        bar[*nb_bar] = (VkImageMemoryBarrier2) {
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
            .srcStageMask = src_stage,
            .dstStageMask = dst_stage,
            .dstAccessMask = new_access,
            .newLayout = new_layout,
            .dstQueueFamilyIndex = new_qf,
            .image = vkf->img[i],
            .subresourceRange = (VkImageSubresourceRange) {
                .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
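/* Illustrative use (assuming this is ff_vk_frame_barrier(), per vulkan.h):
 * transition a frame for compute-shader reads, then record the barriers:
 *
 *     VkImageMemoryBarrier2 bar[8];
 *     int nb_bar = 0;
 *     ff_vk_frame_barrier(vkctx, exec, frame, bar, &nb_bar,
 *                         VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
 *                         VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
 *                         VK_ACCESS_2_SHADER_STORAGE_READ_BIT,
 *                         VK_IMAGE_LAYOUT_GENERAL,
 *                         VK_QUEUE_FAMILY_IGNORED);
 *     vk->CmdPipelineBarrier2(exec->buf, &(VkDependencyInfo) {
 *         .sType                   = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
 *         .pImageMemoryBarriers    = bar,
 *         .imageMemoryBarrierCount = nb_bar,
 *     });
 */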
                      VkPipelineStageFlags stage, VkSpecializationInfo *spec,
                      uint32_t wg_size[3], uint32_t required_subgroup_size)

    memcpy(shd->lg_size, wg_size, 3*sizeof(uint32_t));

    switch (shd->stage) {
    case VK_SHADER_STAGE_ANY_HIT_BIT_KHR:
    case VK_SHADER_STAGE_CALLABLE_BIT_KHR:
    case VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR:
    case VK_SHADER_STAGE_INTERSECTION_BIT_KHR:
    case VK_SHADER_STAGE_MISS_BIT_KHR:
    case VK_SHADER_STAGE_RAYGEN_BIT_KHR:
        shd->bind_point = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
        break;
    case VK_SHADER_STAGE_COMPUTE_BIT:
        shd->bind_point = VK_PIPELINE_BIND_POINT_COMPUTE;
        break;
    default:
        shd->bind_point = VK_PIPELINE_BIND_POINT_GRAPHICS;

                      VkPipelineStageFlags stage,
                      const char *extensions[], int nb_extensions,
                      int lg_x, int lg_y, int lg_z,
                      uint32_t required_subgroup_size)

             (uint32_t []) { lg_x, lg_y, lg_z }, required_subgroup_size);

    if (required_subgroup_size) {
        shd->subgroup_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO;
        shd->subgroup_info.requiredSubgroupSize = required_subgroup_size;

           (stage == VK_SHADER_STAGE_TASK_BIT_EXT ||
            stage == VK_SHADER_STAGE_MESH_BIT_EXT) ?
           (shd->bind_point == VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR) ?
           (shd->bind_point == VK_PIPELINE_BIND_POINT_COMPUTE) ?
           "Compute" : "Graphics",
    GLSLC(0, #define IS_WITHIN(v1, v2) ((v1.x < v2.x) && (v1.y < v2.y))      );

    GLSLC(0, #extension GL_EXT_scalar_block_layout : require                 );
    GLSLC(0, #extension GL_EXT_shader_explicit_arithmetic_types : require    );
    GLSLC(0, #extension GL_EXT_control_flow_attributes : require             );
    GLSLC(0, #extension GL_EXT_shader_image_load_formatted : require         );

    GLSLC(0, #extension GL_EXT_expect_assume : require                       );

    GLSLC(0, #define assumeEXT(x) (x)                                        );
    GLSLC(0, #define expectEXT(x, c) (x)                                     );

    GLSLC(0, #extension GL_EXT_debug_printf : require                        );

    if (stage == VK_SHADER_STAGE_TASK_BIT_EXT ||
        stage == VK_SHADER_STAGE_MESH_BIT_EXT)
        GLSLC(0, #extension GL_EXT_mesh_shader : require                     );

    for (int i = 0; i < nb_extensions; i++)
        GLSLF(0, #extension %s : %s                    ,extensions[i], "require");

    GLSLF(0, layout (local_size_x = %i, local_size_y = %i, local_size_z = %i) in;
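/* GLSLC() appends a literal line to the shader's source string and GLSLF()
 * is its printf-style variant (both from vulkan.h). Illustrative filter-side
 * use when building a shader body (input_img and plane are assumed names):
 *
 *     GLSLC(0, void main()                                      );
 *     GLSLC(0, {                                                );
 *     GLSLC(1,     ivec2 pos = ivec2(gl_GlobalInvocationID.xy); );
 *     GLSLF(1,     vec4 px = imageLoad(input_img[%i], pos);     ,plane);
 *     GLSLC(0, }                                                );
 */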
    const char *p = shd->src.str;
    const char *start = p;
    const size_t len = strlen(p);

    for (int i = 0; i < len; i++) {

    VkPipelineLayoutCreateInfo pipeline_layout_info;

    pipeline_layout_info = (VkPipelineLayoutCreateInfo) {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,

    ret = vk->CreatePipelineLayout(s->hwctx->act_dev, &pipeline_layout_info,

    if (ret != VK_SUCCESS) {

                              VkShaderModule *mod,
                              const uint8_t *spirv, size_t spirv_len)

    VkShaderModuleCreateInfo shader_module_info = {
        .sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
        .pCode = (void *)spirv,
        .codeSize = spirv_len,

    ret = vk->CreateShaderModule(s->hwctx->act_dev, &shader_module_info,
                                 s->hwctx->alloc, mod);
    if (ret != VK_SUCCESS) {
                               VkShaderModule mod, const char *entrypoint)

    VkComputePipelineCreateInfo pipeline_create_info = {
        .sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        .stage = (VkPipelineShaderStageCreateInfo) {
            .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            .pName = entrypoint,
                     VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT : 0x0,
            .stage = shd->stage,

    ret = vk->CreateComputePipelines(s->hwctx->act_dev, VK_NULL_HANDLE, 1,
                                     &pipeline_create_info,

    if (ret != VK_SUCCESS) {

                                  const uint8_t *spirv, size_t spirv_len,
                                  size_t *binary_size, const char *entrypoint)

    VkShaderCreateInfoEXT shader_obj_create = {
        .sType = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT,
                 VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT : 0x0,
        .stage = shd->stage,
        .codeType = VK_SHADER_CODE_TYPE_SPIRV_EXT,
        .codeSize = spirv_len,
        .pName = entrypoint,

    ret = vk->CreateShadersEXT(s->hwctx->act_dev, 1, &shader_obj_create,
                               s->hwctx->alloc, &shd->object);
    if (ret != VK_SUCCESS) {

    if (vk->GetShaderBinaryDataEXT(s->hwctx->act_dev, shd->object,
                                   binary_size, NULL) != VK_SUCCESS)

    int has_singular = 0;
    int max_descriptors = 0;

            (max_descriptors <= s->push_desc_props.maxPushDescriptors) &&
            (has_singular == 0);
    VkDescriptorSetLayoutCreateInfo desc_layout_create = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        .bindingCount = set->nb_bindings,
        .pBindings = set->binding,
                 VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR :

    ret = vk->CreateDescriptorSetLayout(s->hwctx->act_dev,
                                        &desc_layout_create,

    if (ret != VK_SUCCESS) {

                      const char *spirv, size_t spirv_len,
                      const char *entrypoint)

    VkSpecializationMapEntry spec_entries[3];
    VkSpecializationInfo spec_info;
    size_t input_size = spirv_len, binary_size = 0;

    spec_info = (VkSpecializationInfo) {
        .pMapEntries = spec_entries,

    /* Specialization constants 253..255 carry the workgroup size (lg_size) */
    for (int i = 0; i < 3; i++) {
            .constantID = 253 + i,
            .size = sizeof(uint32_t),

           shd->lg_size, 3*sizeof(uint32_t));
#if CONFIG_SHADER_COMPRESSION
    spirv_len = out_len;

                               &binary_size, entrypoint);

    case VK_PIPELINE_BIND_POINT_COMPUTE:

    vk->DestroyShaderModule(s->hwctx->act_dev, mod, s->hwctx->alloc);

    if (input_size != spirv_len)

    if (binary_size != spirv_len)

#if CONFIG_SHADER_COMPRESSION
    [VK_DESCRIPTOR_TYPE_SAMPLER]                = { sizeof(VkDescriptorImageInfo),  "sampler",       1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE]          = { sizeof(VkDescriptorImageInfo),  "texture",       1, 0, 1, 0, },
    [VK_DESCRIPTOR_TYPE_STORAGE_IMAGE]          = { sizeof(VkDescriptorImageInfo),  "image",         1, 1, 1, 0, },
    [VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT]       = { sizeof(VkDescriptorImageInfo),  "subpassInput",  1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER] = { sizeof(VkDescriptorImageInfo),  "sampler",       1, 0, 1, 0, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER]         = { sizeof(VkDescriptorBufferInfo), NULL,            1, 0, 0, 1, },
    [VK_DESCRIPTOR_TYPE_STORAGE_BUFFER]         = { sizeof(VkDescriptorBufferInfo), "buffer",        0, 1, 0, 1, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC] = { sizeof(VkDescriptorBufferInfo), NULL,            1, 0, 0, 1, },
    [VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC] = { sizeof(VkDescriptorBufferInfo), "buffer",        0, 1, 0, 1, },
    [VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER]   = { sizeof(VkBufferView),           "samplerBuffer", 1, 0, 0, 0, },
    [VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER]   = { sizeof(VkBufferView),           "imageBuffer",   1, 0, 0, 0, },
                                    int singular, int print_to_shader_only)

    if (print_to_shader_only)

    for (int i = 0; i < nb; i++) {
        set->binding[i].binding            = i;
        set->binding[i].descriptorType     = desc[i].type;
        set->binding[i].stageFlags         = desc[i].stages;
        set->binding[i].pImmutableSamplers = desc[i].samplers;

    for (int i = 0; i < nb; i++) {

    set->singular = singular;
    set->nb_bindings = nb;
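/* Illustrative descriptor declaration from a filter (sketch only; field names
 * assumed from the FFVulkanDescriptorSetBinding struct used throughout
 * FFmpeg's Vulkan code):
 *
 *     FFVulkanDescriptorSetBinding desc[] = {
 *         {
 *             .name       = "input_img",
 *             .type       = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
 *             .mem_layout = ff_vk_shader_rep_fmt(s->frames->sw_format,
 *                                                FF_VK_REP_FLOAT),
 *             .mem_quali  = "readonly",
 *             .dimensions = 2,
 *             .elems      = planes,
 *             .stages     = VK_SHADER_STAGE_COMPUTE_BIT,
 *         },
 *     };
 *     RET(ff_vk_shader_add_descriptor_set(vkctx, &shd, desc, 1, 0, 0));
 */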
    for (int i = 0; i < nb; i++) {
        if (desc[i].mem_layout &&
            (desc[i].type != VK_DESCRIPTOR_TYPE_STORAGE_IMAGE))

        if (desc[i].type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
            if (desc[i].mem_layout) {
                /* GLSL image formats ending in "ui"/"i" denote unsigned/signed
                 * integer images */
                int len = strlen(desc[i].mem_layout);
                if (desc[i].mem_layout[len - 1] == 'i' &&
                    desc[i].mem_layout[len - 2] == 'u') {
                } else if (desc[i].mem_layout[len - 1] == 'i') {

        if (desc[i].buf_elems) {

        } else if (desc[i].elems > 0)

    VkDescriptorSetLayout *tmp_layouts;
    VkDescriptorSetAllocateInfo set_alloc_info;
    VkDescriptorPoolCreateInfo pool_create_info;

    pool_create_info = (VkDescriptorPoolCreateInfo) {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,

    ret = vk->CreateDescriptorPool(s->hwctx->act_dev, &pool_create_info,

    if (ret != VK_SUCCESS) {

    tmp_layouts = av_malloc_array(pool_create_info.maxSets, sizeof(*tmp_layouts));

    set_alloc_info = (VkDescriptorSetAllocateInfo) {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
        .pSetLayouts = tmp_layouts,
        .descriptorSetCount = pool_create_info.maxSets,

           sizeof(*tmp_layouts));

    ret = vk->AllocateDescriptorSets(s->hwctx->act_dev, &set_alloc_info,

    if (ret != VK_SUCCESS) {
                                      VkWriteDescriptorSet *write_info)

        vk->UpdateDescriptorSets(s->hwctx->act_dev, 1, write_info, 0, NULL);

        vk->CmdPushDescriptorSetKHR(e->buf,

        vk->UpdateDescriptorSets(s->hwctx->act_dev, 1, write_info, 0, NULL);

                              VkImageView view, VkImageLayout layout,

    VkDescriptorImageInfo desc_pool_write_info_img = {

    VkWriteDescriptorSet desc_pool_write_info = {
        .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        .descriptorCount = 1,
        .dstArrayElement = offs,
        .descriptorType = desc_set->binding[bind].descriptorType,
        .pImageInfo = &desc_pool_write_info_img,

                                  VkImageView *views, int set, int binding,
                                  VkImageLayout layout, VkSampler sampler)

    for (int i = 0; i < nb_planes; i++)

                                int set, int bind, int elem,

    VkDescriptorBufferInfo desc_pool_write_info_buf = {

    VkWriteDescriptorSet desc_pool_write_info = {
        .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        .descriptorCount = 1,
        .dstArrayElement = elem,
        .descriptorType = desc_set->binding[bind].descriptorType,
        .pBufferInfo = &desc_pool_write_info_buf,
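/* Illustrative updates before a dispatch (sketch only; signatures assumed
 * from vulkan.h):
 *
 *     ff_vk_shader_update_img_array(vkctx, exec, &shd, in_frame, input_views,
 *                                   0, 0,
 *                                   VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
 *                                   sampler);
 *     ff_vk_shader_update_desc_buffer(vkctx, exec, &shd, 0, 1, 0,
 *                                     &params_buf, 0, params_buf.size,
 *                                     VK_FORMAT_UNDEFINED);
 */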
                            VkShaderStageFlagBits stage,

    VkShaderStageFlagBits stages = shd->stage;
    vk->CmdBindShadersEXT(e->buf, 1, &stages, &shd->object);

    if (shd->shader.module)
        vk->DestroyShaderModule(s->hwctx->act_dev, shd->shader.module,

    vk->DestroyShaderEXT(s->hwctx->act_dev, shd->object, s->hwctx->alloc);
    vk->DestroyPipeline(s->hwctx->act_dev, shd->pipeline, s->hwctx->alloc);

    vk->DestroyDescriptorSetLayout(s->hwctx->act_dev, shd->desc_layout[i],

    static const AVClass vulkan_context_class = {

    memset(s, 0, sizeof(*s));
    s->log_parent = log_parent;
    s->class = &vulkan_context_class;

    s->hwfc = s->frames->hwctx;

    device_ref = s->frames->device_ref;

    if (!s->device_ref) {

    s->hwctx = s->device->hwctx;

                                       s->hwctx->nb_enabled_dev_extensions);
                                       s->hwctx->nb_enabled_inst_extensions);