// Gears-C/src/renderer_vulkan.c
// (1698 lines, 48 KiB, C)
// ::Vulkan::Globals::Start::
// Translation-unit-wide renderer state.
// Queue family indices start at -1, meaning "not selected yet"
// (CheckDeviceQueueSupport fills them in). The swapchain format /
// color space / present mode start at INT_MAX, which CreateSwapchain
// treats as "no preference, pick defaults".
static Renderer renderer = {
.vk = {
.queues = {
.graphics = -1,
.transfer = -1,
},
.sc = {
.format = INT_MAX,
.color_space = INT_MAX,
.present_mode = INT_MAX,
},
}
};
// NOTE(review): not referenced in this chunk — presumably per-worker-thread
// indices for the immediate-submit pools; verify against the rest of the file.
static u32 vk_thread_indices[10] = {0};
#if __linux__
// Condition variable for worker-thread signalling (Linux only; Windows TODO).
pthread_cond_t cond;
#elif _WIN32
# error threading not yet implemented
#endif
// ::Vulkan::Globals::End::
// ::Vulkan::Includes::CFiles::Start::
#include "renderer_vulkan_public.c"
// ::Vulkan::Includes::CFiles::End::
// ::Vulkan::Util::Functions::Start::
// Index of the in-flight frame slot for the current frame counter.
static inline u32 GetFrameIndex()
{
    u32 slot = renderer.frame_state.frame_cnt % FRAME_OVERLAP;
    return slot;
}
static inline VkCommandBuffer GetFrameCmdBuf()
{
return renderer.vk.frame.buffers[GetFrameIndex()];
}
static inline VkFence *GetFrameRenderFence()
{
return &renderer.vk.frame.render_fences[GetFrameIndex()];
}
static inline VkSemaphore GetFrameRenderSem()
{
return renderer.vk.frame.render_sems[GetFrameIndex()];
}
static inline VkSemaphore GetFrameSwapSem()
{
return renderer.vk.frame.swapchain_sems[GetFrameIndex()];
}
// Pointer to the pending-destroy buffer count of the current frame slot.
static inline u32 *GetFrameBufferCount()
{
    u32 slot = GetFrameIndex();
    return &renderer.vk.frame.buffer_counts[slot];
}
static inline RenderBuffer *GetFrameRenderBuffers()
{
return renderer.vk.frame.buffer_destroy_queues[GetFrameIndex()];
}
// Records a full-image, linearly filtered blit from src to dst.
// src must be in TRANSFER_SRC_OPTIMAL and dst in TRANSFER_DST_OPTIMAL.
static inline void CopyImageToImage(VkCommandBuffer cmd, VkImage src, VkImage dst, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    // Both sides blit mip 0, layer 0 of the color aspect.
    VkImageSubresourceLayers layers = {
        .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
        .baseArrayLayer = 0,
        .layerCount = 1,
        .mipLevel = 0,
    };
    VkImageBlit2 region = {
        .sType = STYPE(IMAGE_BLIT_2),
        .srcSubresource = layers,
        .dstSubresource = layers,
    };
    // Offsets [0] stay zeroed; [1] spans the full extent of each image.
    region.srcOffsets[1] = (VkOffset3D){ .x = (i32)src_ext.width, .y = (i32)src_ext.height, .z = 1 };
    region.dstOffsets[1] = (VkOffset3D){ .x = (i32)dst_ext.width, .y = (i32)dst_ext.height, .z = 1 };
    VkBlitImageInfo2 info = {
        .sType = STYPE(BLIT_IMAGE_INFO_2),
        .srcImage = src,
        .srcImageLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        .dstImage = dst,
        .dstImageLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        .filter = VK_FILTER_LINEAR,
        .regionCount = 1,
        .pRegions = &region,
    };
    vkCmdBlitImage2(cmd, &info);
}
// Records a layout-transition barrier for every mip level and array layer
// of `img`. Uses ALL_COMMANDS stages on both sides — heavyweight but simple
// and always correct for this renderer's transition points.
static inline void TransitionImageLayout(VkCommandBuffer cmd, VkImage img, VkImageLayout curr, VkImageLayout new)
{
    // Depth-attachment transitions touch the depth aspect; everything else
    // here is a color image.
    VkImageAspectFlags aspect = (new == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL)
        ? VK_IMAGE_ASPECT_DEPTH_BIT
        : VK_IMAGE_ASPECT_COLOR_BIT;
    VkImageMemoryBarrier2 img_barrier = {
        .sType = STYPE(IMAGE_MEMORY_BARRIER_2),
        .srcStageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
        .srcAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT,
        .dstStageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
        .dstAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT | VK_ACCESS_2_MEMORY_READ_BIT,
        .oldLayout = curr,
        .newLayout = new,
        .image = img,
        .subresourceRange = {
            .aspectMask = aspect,
            .baseMipLevel = 0,
            .levelCount = VK_REMAINING_MIP_LEVELS,
            .baseArrayLayer = 0,
            .layerCount = VK_REMAINING_ARRAY_LAYERS,
        },
    };
    VkDependencyInfo deps = {
        .sType = STYPE(DEPENDENCY_INFO),
        .imageMemoryBarrierCount = 1,
        .pImageMemoryBarriers = &img_barrier,
    };
    vkCmdPipelineBarrier2(cmd, &deps);
}
// Transitions a tracked Image and updates its cached layout.
static inline void TransitionImage(VkCommandBuffer cmd, Image *img, VkImageLayout new)
{
    VkImageLayout old = img->curr_layout;
    img->curr_layout = new;
    TransitionImageLayout(cmd, img->img, old, new);
}
// ::Vulkan::Util::Functions::End::
// ::Vulkan::Rendering::Functions::Start::
// Prepares this frame's attachments and begins dynamic rendering:
// draw image -> COLOR_ATTACHMENT, depth image -> DEPTH_ATTACHMENT, and the
// acquired swapchain image -> TRANSFER_DST (it receives a blit of the draw
// image after rendering). Also binds all descriptor sets once for the frame.
static void BeginRendering()
{
VkCommandBuffer cmd = GetFrameCmdBuf();
VkImage curr_img = renderer.vk.sc.imgs[renderer.frame_state.img_ix];
TransitionImage(cmd, &renderer.vk.sc.draw_img, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
TransitionImage(cmd, &renderer.vk.sc.depth_img, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL);
TransitionImageLayout(cmd, curr_img, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
// Color cleared to dark grey each frame.
VkRenderingAttachmentInfo col_attach_info = {
.sType = STYPE(RENDERING_ATTACHMENT_INFO),
.imageView = renderer.vk.sc.draw_img.view,
.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
.storeOp = VK_ATTACHMENT_STORE_OP_STORE,
.clearValue = { { 0.2f, 0.2f, 0.2f, 1.0f } },
};
// NOTE(review): clearValue is omitted, so depth is cleared to 0.0. That is
// only correct with reversed-Z (GREATER-style compare); a conventional
// depth setup should clear to 1.0 — confirm the pipeline's depth compare op.
VkRenderingAttachmentInfo depth_attach_info = {
.sType = STYPE(RENDERING_ATTACHMENT_INFO),
.imageView = renderer.vk.sc.depth_img.view,
.imageLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
.storeOp = VK_ATTACHMENT_STORE_OP_STORE,
};
VkRenderingInfo render_info = {
.sType = STYPE(RENDERING_INFO),
.layerCount = 1,
.colorAttachmentCount = 1,
.pColorAttachments = &col_attach_info,
.pDepthAttachment = &depth_attach_info,
.renderArea = {
.extent = {
.width = renderer.vk.sc.extent.width,
.height = renderer.vk.sc.extent.height,
},
},
};
vkCmdBeginRendering(cmd, &render_info);
// All descriptor sets (shared + bindless tables) are bound once up front.
vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, renderer.vk.pipe.pipeline_layout, 0, DESC_TYPE_MAX, renderer.vk.pipe.sets, 0, NULL);
}
// ::Vulkan::Rendering::Functions::End::
// ::Vulkan::ImmediateSubmit::Functions::Start::
// Prepares an immediate-submit command buffer for recording: resets the
// fence and the buffer, then begins it as one-time-submit.
// Returns false (after logging) on the first failing Vulkan call.
static b32 BeginImmSubmit(VkDevice device, VkFence fence, VkCommandBuffer cmd)
{
    VkResult res = vkResetFences(device, 1, &fence);
    if (res != VK_SUCCESS)
    {
        Printfln("vkResetFences failure: %s", VkResultStr(res));
        return false;
    }
    res = vkResetCommandBuffer(cmd, 0);
    if (res != VK_SUCCESS)
    {
        Printfln("vkResetCommandBuffer failure: %s", VkResultStr(res));
        return false;
    }
    VkCommandBufferBeginInfo begin_info = {
        .sType = STYPE(COMMAND_BUFFER_BEGIN_INFO),
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };
    res = vkBeginCommandBuffer(cmd, &begin_info);
    if (res != VK_SUCCESS)
    {
        Printfln("vkBeginCommandBuffer failure: %s", VkResultStr(res));
        return false;
    }
    return true;
}
// Ends recording, submits the buffer on `queue` signalling `fence`, and
// blocks until the GPU has finished (fence wait with a long timeout).
// Returns false (after logging) on the first failing Vulkan call.
static b32 FinishImmSubmit(VkDevice device, VkFence fence, VkCommandBuffer cmd, VkQueue queue)
{
    VkResult res = vkEndCommandBuffer(cmd);
    if (res != VK_SUCCESS)
    {
        Printfln("vkEndCommandBuffer imm failure: %s", VkResultStr(res));
        return false;
    }
    VkCommandBufferSubmitInfo cmd_submit_info = {
        .sType = STYPE(COMMAND_BUFFER_SUBMIT_INFO),
        .commandBuffer = cmd,
    };
    VkSubmitInfo2 submit_info = {
        .sType = STYPE(SUBMIT_INFO_2),
        .commandBufferInfoCount = 1,
        .pCommandBufferInfos = &cmd_submit_info,
    };
    res = vkQueueSubmit2(queue, 1, &submit_info, fence);
    if (res != VK_SUCCESS)
    {
        Printfln("vkQueueSubmit2 imm failure: %s", VkResultStr(res));
        return false;
    }
    res = vkWaitForFences(device, 1, &fence, true, 9999999999);
    if (res != VK_SUCCESS)
    {
        Printfln("vkWaitForFences imm failure: %s", VkResultStr(res));
        return false;
    }
    return true;
}
// ::Vulkan::ImmediateSubmit::Functions::End::
// ::Vulkan::Swapchain::Functions::Start::
// Recreates the swapchain and draw/depth images at the pending window size.
// Blocks on vkDeviceWaitIdle first so no in-flight frame still references
// the old swapchain resources when they are destroyed.
static void ResizeSwapchain()
{
vkDeviceWaitIdle(renderer.vk.device);
DestroySwapchain();
DestroyDrawImages();
renderer.vk.sc.extent.width = renderer.pending.render_width;
renderer.vk.sc.extent.height = renderer.pending.render_height;
// Failure here is unrecoverable: there is no swapchain left to fall back to.
Assert(CreateSwapchain(), "Unable to recreate swapchain");
Assert(CreateDrawImages(), "Unable to recreate draw images");
renderer.pending.resized = false;
}
// ::Vulkan::Swapchain::Functions::End::
// ::Vulkan::Images::Functions::Start::
// Uploads a width x height RGBA8 pixel buffer into a freshly created sampled
// image, then creates its view and sampler.
// `buf` must contain width * height * 4 bytes (VK_FORMAT_R8G8B8A8_SRGB).
// Returns false on the first failure (already-created Vulkan objects are not
// rolled back here; the caller owns cleanup of `image`).
static b32 CreateVkSampler(Image *image, u32 thread_idx, u8 *buf, u32 width, u32 height)
{
    b32 success = true;
    VkDevice device = renderer.vk.device;
    VkCommandBuffer cmd = renderer.vk.imm.cmds[thread_idx];
    VkFence fence = renderer.vk.imm.fences[thread_idx];
    VkQueue queue = renderer.vk.queues.transfer_queue;
    // BUG FIX: the staging buffer was sized width * height, but the image
    // format is 4 bytes per texel, so the copy read past the staging buffer.
    u32 byte_size = width * height * 4;
    RenderBuffer staging_buffer = {
        .type = RENDER_BUFFER_TYPE_STAGING,
        .size = byte_size,
    };
    VmaAllocationCreateInfo alloc_create_info = {
        .usage = VMA_MEMORY_USAGE_GPU_ONLY,
        .requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
    };
    VkImageCreateInfo image_info = {
        .sType = STYPE(IMAGE_CREATE_INFO),
        .imageType = VK_IMAGE_TYPE_2D,
        .mipLevels = 1,
        .arrayLayers = 1,
        .format = VK_FORMAT_R8G8B8A8_SRGB,
        .tiling = VK_IMAGE_TILING_OPTIMAL,
        .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
        .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
        .extent = {
            .width = width,
            .height = height,
            .depth = 1,
        },
    };
    // Distinct graphics/transfer families: use concurrent sharing so the image
    // is usable on both queues without explicit ownership transfers.
    if (renderer.vk.queues.graphics != renderer.vk.queues.transfer)
    {
        image_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
        image_info.queueFamilyIndexCount = 2;
        image_info.pQueueFamilyIndices = (u32[]){renderer.vk.queues.graphics, renderer.vk.queues.transfer};
    }
    VkResult result = vmaCreateImage(renderer.vk.alloc, &image_info,
        &alloc_create_info, &image->img, &image->alloc, NULL);
    if (result != VK_SUCCESS)
    {
        success = false;
        Printfln("vmaCreateImage failure: %s", VkResultStr(result));
    }
    if (success)
    {
        image->curr_layout = VK_IMAGE_LAYOUT_UNDEFINED;
        success = CreateBuffer(&staging_buffer);
    }
    if (success)
        success = BeginImmSubmit(device, fence, cmd);
    if (success)
    {
        // BUG FIX: this transition used to be recorded BEFORE BeginImmSubmit,
        // which resets the command buffer — the barrier was discarded (and was
        // recorded into a buffer that was not in the recording state).
        TransitionImage(cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
        rawptr mapped_buf = NULL;
        result = vmaMapMemory(renderer.vk.alloc, staging_buffer.alloc, &mapped_buf);
        if (result == VK_SUCCESS)
        {
            MemCpy(mapped_buf, buf, byte_size);
            VkBufferImageCopy buffer_copy = {
                .bufferRowLength = width,
                .bufferImageHeight = height,
                .imageSubresource = {
                    .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                    .layerCount = 1,
                },
                .imageExtent = {
                    .width = width,
                    .height = height,
                    .depth = 1,
                },
            };
            vkCmdCopyBufferToImage(cmd, staging_buffer.buffer, image->img, image->curr_layout, 1, &buffer_copy);
            vmaUnmapMemory(renderer.vk.alloc, staging_buffer.alloc);
            success = FinishImmSubmit(device, fence, cmd, queue);
        }
        else
        {
            Printfln("vmaMapMemory failure: %s", VkResultStr(result));
            success = false;
        }
        // BUG FIX: the staging buffer used to be destroyed before the copy was
        // even submitted; destroy it only after FinishImmSubmit's fence wait
        // guarantees the GPU is done reading it.
        vmaDestroyBuffer(renderer.vk.alloc, staging_buffer.buffer, staging_buffer.alloc);
    }
    if (success)
    {
        VkImageViewCreateInfo view_info = {
            .sType = STYPE(IMAGE_VIEW_CREATE_INFO),
            .image = image->img,
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .format = VK_FORMAT_R8G8B8A8_SRGB,
            .subresourceRange = {
                .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                .levelCount = 1,
                .layerCount = 1,
            }
        };
        result = vkCreateImageView(device, &view_info, NULL, &image->view);
        if (result != VK_SUCCESS)
        {
            Printfln("vkCreateImageView failure: %s", VkResultStr(result));
            success = false;
        }
    }
    if (success)
    {
        VkPhysicalDeviceProperties properties;
        vkGetPhysicalDeviceProperties(renderer.vk.phys_device, &properties);
        // TODO: handle no anisotropy
        VkSamplerCreateInfo sampler_info = {
            .sType = STYPE(SAMPLER_CREATE_INFO),
            .magFilter = VK_FILTER_LINEAR,
            .minFilter = VK_FILTER_LINEAR,
            .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .anisotropyEnable = VK_TRUE,
            .maxAnisotropy = properties.limits.maxSamplerAnisotropy,
            .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
            .compareOp = VK_COMPARE_OP_ALWAYS,
            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
        };
        result = vkCreateSampler(device, &sampler_info, NULL, &image->sampler);
        if (result != VK_SUCCESS)
        {
            Printfln("vkCreateSampler failure: %s", VkResultStr(result));
            success = false;
        }
    }
    return success;
}
// ::Vulkan::Images::Functions::End::
// ::Vulkan::Descriptors::Functions::Start::
// Looks up a previously registered descriptor asset by id.
// Returns NULL when the asset has not been registered for this binding type.
static DescAssetInfo *DescriptorTableSearch(DescType type, u64 asset_id)
{
    HashTable *table = &renderer.vk.pipe.bindings[type].lookup_table;
    KeyValuePair *pair = HashTableSearchU64(table, asset_id);
    return pair != NULL ? (DescAssetInfo *)pair->value_rawptr : NULL;
}
// Registers a descriptor handle under `asset_id` so later lookups
// (DescriptorTableSearch) can reuse it instead of re-binding the asset.
static void DescriptorTableInsert(DescType type, u64 asset_id, DescHandle handle)
{
    DescAssetInfo *info = FLMemAlloc(sizeof(DescAssetInfo));
    info->type = type;
    info->asset_id = asset_id;
    info->handle = handle;
    HashTablePushU64Rawptr(&renderer.vk.pipe.bindings[type].lookup_table, asset_id, info);
}
// ::Vulkan::Descriptors::Functions::End::
// ::Vulkan::Init::Functions::Start::
// Creates the VMA allocator for all GPU memory in this renderer.
// VMA copies the pVulkanFunctions table during vmaCreateAllocator, so
// pointing it at a stack-local struct is safe.
static b32 CreateVmaAllocator()
{
    VmaVulkanFunctions vk_functions = {
        .vkGetInstanceProcAddr = vkGetInstanceProcAddr,
        .vkGetDeviceProcAddr = vkGetDeviceProcAddr,
    };
    vma_create_info.pVulkanFunctions = &vk_functions;
    vma_create_info.physicalDevice = renderer.vk.phys_device;
    vma_create_info.device = renderer.vk.device;
    vma_create_info.instance = renderer.vk.inst;
    VkResult result = vmaCreateAllocator(&vma_create_info, &renderer.vk.alloc);
    if (result != VK_SUCCESS)
    {
        // Consistency fix: use Printfln + VkResultStr like the rest of the file
        // (the old Printf("%d") printed a raw enum with no newline).
        Printfln("vmaCreateAllocator failure: %s", VkResultStr(result));
    }
    return result == VK_SUCCESS;
}
// Returns true when queue family `index` of `device` can present to `surface`.
static b32 CheckQueueSurfaceSupport(i32 index, VkPhysicalDevice device, VkSurfaceKHR surface)
{
    // BUG FIX: the output variable was read uninitialized when the query
    // itself failed; initialize it and check the VkResult.
    b32 surface_supported = false;
    VkResult result = vkGetPhysicalDeviceSurfaceSupportKHR(device, (u32)index, surface, &surface_supported);
    return result == VK_SUCCESS && surface_supported;
}
// Selects queue families: a present-capable graphics family, plus the best
// available transfer family, ranked (best first):
//   1) transfer + sparse binding, without graphics/compute (dedicated DMA)
//   2) transfer + sparse binding
//   3) transfer without compute
//   4) any transfer-capable family
// Falls back to sharing the graphics family when nothing better exists.
// graphics stays -1 when the device cannot present (caller rejects it).
static DeviceQueues CheckDeviceQueueSupport(VkPhysicalDevice device, VkSurfaceKHR surface)
{
    DeviceQueues queues = { .graphics = -1, .transfer = -1 };
    u32 queue_count;
    vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_count, NULL);
    VkQueueFamilyProperties *families = MakeArray(renderer.arena, VkQueueFamilyProperties, queue_count);
    vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_count, families);
    // Single all-in-one family with a single queue: share it for everything.
    // BUG FIX: the capability mask listed VK_QUEUE_TRANSFER_BIT twice and
    // never required graphics support.
    if (queue_count == 1 &&
        BitEq(families[0].queueFlags, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT) &&
        families[0].queueCount == 1)
    {
        queues.graphics = queues.transfer = 0;
        queues.single_queue = true;
    }
    else
    {
        b8 sparse_binding = false;
        b8 transfer_only = false;
        for (i32 i = 0; i < (i32)queue_count; i++)
        {
            // First present-capable graphics family wins.
            if (queues.graphics < 0 && CheckQueueSurfaceSupport(i, device, surface) && BitEq(families[i].queueFlags, VK_QUEUE_GRAPHICS_BIT))
            {
                queues.graphics = i;
                continue;
            }
            if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT) && !BitEq(families[i].queueFlags, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT))
            {
                sparse_binding = true;
                transfer_only = true;
                queues.transfer = i;
                continue;
            }
            if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT) && !(sparse_binding && transfer_only))
            {
                sparse_binding = true;
                queues.transfer = i;
                continue;
            }
            if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT) && !BitEq(families[i].queueFlags, VK_QUEUE_COMPUTE_BIT) && !sparse_binding)
            {
                transfer_only = true;
                queues.transfer = i;
                continue;
            }
            if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT) && !sparse_binding && !transfer_only)
                queues.transfer = i;
        }
        // No dedicated transfer family found: share the graphics family.
        if (queues.transfer < 0)
            queues.transfer = queues.graphics;
    }
    return queues;
}
// Accepts a physical device when it supports Vulkan >= 1.3, advertises every
// extension in `device_extensions`, and exposes at least one surface format
// and present mode. Sets *discrete when the device is a discrete GPU.
static b32 CheckDevicePropertiesSupport(VkPhysicalDevice device, VkSurfaceKHR surface, b32 *discrete)
{
    b32 success = false;
    VkPhysicalDeviceProperties properties = {0};
    vkGetPhysicalDeviceProperties(device, &properties);
    // BUG FIX: comparing only VK_API_VERSION_MINOR(...) >= 3 would reject a
    // hypothetical 2.x device; compare the packed version directly instead.
    if (properties.apiVersion >= VK_API_VERSION_1_3)
    {
        u32 ext_count;
        vkEnumerateDeviceExtensionProperties(device, NULL, &ext_count, NULL);
        VkExtensionProperties *ext_properties = ArenaAlloc(renderer.arena, sizeof(VkExtensionProperties) * ext_count);
        vkEnumerateDeviceExtensionProperties(device, NULL, &ext_count, ext_properties);
        i32 matched = 0;
        for (u32 i = 0; i < ext_count; i++) {
            for (i32 j = 0; j < Len(device_extensions); j++) {
                if (StrEq(ext_properties[i].extensionName, device_extensions[j])) {
                    matched++;
                    break;
                }
            }
        }
        if (matched == Len(device_extensions))
        {
            // Only the counts matter here; the actual formats/modes are chosen
            // later in CreateSwapchain.
            u32 fmt_count, present_count;
            vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &fmt_count, NULL);
            vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, NULL);
            *discrete = properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
            success = fmt_count && present_count;
        }
    }
    return success;
}
// Verifies the device exposes every feature this renderer depends on:
// dynamic descriptor indexing, descriptor indexing + BDA (1.2), and
// synchronization2 + dynamic rendering (1.3).
static b32 CheckDeviceFeatureSupport(VkPhysicalDevice device)
{
    // Chain 1.2 and 1.3 feature structs so a single query fills everything
    // (the old code issued two separate vkGetPhysicalDeviceFeatures2 calls).
    VkPhysicalDeviceVulkan13Features features_13 = { .sType = STYPE(PHYSICAL_DEVICE_VULKAN_1_3_FEATURES) };
    VkPhysicalDeviceVulkan12Features features_12 = { .sType = STYPE(PHYSICAL_DEVICE_VULKAN_1_2_FEATURES), .pNext = &features_13 };
    VkPhysicalDeviceFeatures2 features2 = { .sType = STYPE(PHYSICAL_DEVICE_FEATURES_2), .pNext = &features_12 };
    vkGetPhysicalDeviceFeatures2(device, &features2);
    VkPhysicalDeviceFeatures features = features2.features;
    b32 result = true;
    result &= (b32)features.shaderUniformBufferArrayDynamicIndexing;
    result &= (b32)features.shaderSampledImageArrayDynamicIndexing;
    result &= (b32)features.shaderStorageBufferArrayDynamicIndexing;
    result &= (b32)features.shaderStorageImageArrayDynamicIndexing;
    result &= (b32)features_12.descriptorIndexing;
    result &= (b32)features_12.bufferDeviceAddress;
    result &= (b32)features_13.synchronization2;
    result &= (b32)features_13.dynamicRendering;
    return result;
}
static b32 CreateDevice()
{
VkInstance inst = renderer.vk.inst;
VkSurfaceKHR surface = renderer.vk.surface;
u32 device_count;
vkEnumeratePhysicalDevices(inst, &device_count, NULL);
VkPhysicalDevice *devices = ArenaAlloc(renderer.arena, sizeof(VkPhysicalDevice) * device_count);
vkEnumeratePhysicalDevices(inst, &device_count, devices);
b32 discrete_device = false;
DeviceQueues *queues = &renderer.vk.queues;
VkPhysicalDevice *phys_device = &renderer.vk.phys_device;
for (u32 i = 0; i < device_count; i++) {
DeviceQueues current_queues = CheckDeviceQueueSupport(devices[i], surface);
b32 discrete = false;
if (current_queues.graphics < 0)
continue;
if (!CheckDevicePropertiesSupport(devices[i], surface, &discrete))
continue;
if (discrete_device && !discrete)
continue;
if (!CheckDeviceFeatureSupport(devices[i]))
continue;
discrete_device = discrete;
*queues = current_queues;
*phys_device = devices[i];
if (discrete_device && queues->graphics != queues->transfer)
break;
}
b32 success = false;
if (phys_device != NULL)
{
VkDeviceQueueCreateInfo queue_info[2] = {0};
f32 priority = 1.0f;
u32 count = 1;
u32 transfer_queue_index = 0;
queue_info[0].sType = STYPE(DEVICE_QUEUE_CREATE_INFO);
queue_info[0].queueFamilyIndex = queues->graphics;
queue_info[0].queueCount = 1;
queue_info[0].pQueuePriorities = &priority;
queue_info[0].flags = 0;
if (!queues->single_queue) {
queue_info[1].sType = STYPE(DEVICE_QUEUE_CREATE_INFO);
queue_info[1].queueFamilyIndex = queues->transfer;
queue_info[1].queueCount = 1;
queue_info[1].pQueuePriorities = &priority;
queue_info[1].flags = 0;
if (queues->transfer == queues->graphics)
transfer_queue_index = 1;
count++;
}
device_info.queueCreateInfoCount = count;
device_info.pQueueCreateInfos = &queue_info[0];
VkResult result = vkCreateDevice(renderer.vk.phys_device, &device_info, NULL, &renderer.vk.device);
if (result != VK_SUCCESS) {
Printf("vkCreateDevice failure: %d", result);
}
else
{
Assert(InitVkDeviceFunctions(), "Failed to initialize device functions");
vkGetDeviceQueue(renderer.vk.device, queues->graphics, 0, &queues->graphics_queue);
vkGetDeviceQueue(renderer.vk.device, queues->transfer, transfer_queue_index, &queues->transfer_queue);
success = true;
}
}
return success;
}
static b32 InitVkGlobalFunctions()
{
b32 result = LoadVulkanLib();
if (result)
{
INIT_FN(vkGetInstanceProcAddr);
INIT_FN(vkEnumerateInstanceLayerProperties);
INIT_FN(vkCreateInstance);
}
return result;
}
// Resolves instance-level Vulkan entry points via vkGetInstanceProcAddr.
// Must run after the VkInstance is created and before device selection.
static b32 InitVkInstanceFunctions()
{
VkInstance instance = renderer.vk.inst;
// Platform window-system surface creation.
#ifdef __linux__
{
INIT_INST_FN(vkCreateXcbSurfaceKHR);
}
#elif _WIN32
{
INIT_INST_FN(vkCreateWin32SurfaceKHR);
}
#elif __APPLE__ || __MACH__
#error Not yet implemented
#elif __unix__ && !__linux__
#error Not yet implemented
#endif
// Debug messenger, debug builds only.
#ifdef BUILD_DEBUG
{
INIT_INST_FN(vkCreateDebugUtilsMessengerEXT);
INIT_INST_FN(vkDestroyDebugUtilsMessengerEXT);
}
#endif
// Device enumeration/selection and surface capability queries.
INIT_INST_FN(vkEnumeratePhysicalDevices);
INIT_INST_FN(vkGetPhysicalDeviceQueueFamilyProperties);
INIT_INST_FN(vkGetPhysicalDeviceSurfaceSupportKHR);
INIT_INST_FN(vkCreateDevice);
INIT_INST_FN(vkGetPhysicalDeviceProperties);
INIT_INST_FN(vkGetPhysicalDeviceFeatures2);
INIT_INST_FN(vkGetPhysicalDeviceSurfaceFormatsKHR);
INIT_INST_FN(vkGetPhysicalDeviceSurfacePresentModesKHR);
INIT_INST_FN(vkEnumerateDeviceExtensionProperties);
INIT_INST_FN(vkGetDeviceProcAddr);
INIT_INST_FN(vkDestroyInstance);
INIT_INST_FN(vkDestroySurfaceKHR);
INIT_INST_FN(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
INIT_INST_FN(vkGetPhysicalDeviceImageFormatProperties);
return true;
}
// Resolves device-level Vulkan entry points via vkGetDeviceProcAddr.
// Called from CreateDevice immediately after vkCreateDevice succeeds.
static b32 InitVkDeviceFunctions() {
VkDevice device = renderer.vk.device;
// Object creation.
INIT_DEV_FN(vkCreateSwapchainKHR);
INIT_DEV_FN(vkCreateImage);
INIT_DEV_FN(vkCreateImageView);
INIT_DEV_FN(vkGetSwapchainImagesKHR);
INIT_DEV_FN(vkGetDeviceQueue);
INIT_DEV_FN(vkCreateSemaphore);
INIT_DEV_FN(vkAllocateCommandBuffers);
INIT_DEV_FN(vkCreateCommandPool);
INIT_DEV_FN(vkCreateFence);
INIT_DEV_FN(vkCreateDescriptorPool);
INIT_DEV_FN(vkCreateDescriptorSetLayout);
INIT_DEV_FN(vkAllocateDescriptorSets);
INIT_DEV_FN(vkCreatePipelineLayout);
INIT_DEV_FN(vkResetDescriptorPool);
INIT_DEV_FN(vkCreateShaderModule);
INIT_DEV_FN(vkCreateGraphicsPipelines);
INIT_DEV_FN(vkCreateComputePipelines);
INIT_DEV_FN(vkUpdateDescriptorSets);
// Object destruction.
INIT_DEV_FN(vkDestroyDevice);
INIT_DEV_FN(vkDestroyDescriptorPool);
INIT_DEV_FN(vkDestroySwapchainKHR);
INIT_DEV_FN(vkDestroyImage);
INIT_DEV_FN(vkDestroyImageView);
INIT_DEV_FN(vkDestroyCommandPool);
INIT_DEV_FN(vkDestroySemaphore);
INIT_DEV_FN(vkDestroyFence);
INIT_DEV_FN(vkDestroyPipelineLayout);
INIT_DEV_FN(vkDestroyPipeline);
// Command recording, submission and presentation.
INIT_DEV_FN(vkWaitForFences);
INIT_DEV_FN(vkBeginCommandBuffer);
INIT_DEV_FN(vkEndCommandBuffer);
INIT_DEV_FN(vkAcquireNextImageKHR);
INIT_DEV_FN(vkCmdBindPipeline);
INIT_DEV_FN(vkCmdBindDescriptorSets);
INIT_DEV_FN(vkCmdDispatch);
INIT_DEV_FN(vkCmdBeginRendering);
INIT_DEV_FN(vkCmdEndRendering);
INIT_DEV_FN(vkCmdSetViewport);
INIT_DEV_FN(vkCmdSetScissor);
INIT_DEV_FN(vkCmdPushConstants);
INIT_DEV_FN(vkCmdBindIndexBuffer);
INIT_DEV_FN(vkCmdBindVertexBuffers);
INIT_DEV_FN(vkCmdDrawIndexed);
INIT_DEV_FN(vkCmdBlitImage2);
INIT_DEV_FN(vkCmdPipelineBarrier2);
INIT_DEV_FN(vkCmdCopyBufferToImage);
INIT_DEV_FN(vkCmdCopyBuffer);
INIT_DEV_FN(vkQueueSubmit2);
INIT_DEV_FN(vkResetFences);
INIT_DEV_FN(vkResetCommandBuffer);
INIT_DEV_FN(vkFreeCommandBuffers);
INIT_DEV_FN(vkDestroyDescriptorSetLayout);
INIT_DEV_FN(vkDestroyShaderModule);
INIT_DEV_FN(vkQueuePresentKHR);
INIT_DEV_FN(vkCmdDraw);
INIT_DEV_FN(vkDeviceWaitIdle);
INIT_DEV_FN(vkCmdClearColorImage);
INIT_DEV_FN(vkCreateSampler);
return true;
}
// TODO(MA): implement other platforms
#ifdef __linux__
// Creates the XCB window surface the swapchain presents to.
static b32 CreateSurface()
{
    PlatformWindow *window = GetPlatformWindow();
    VkXcbSurfaceCreateInfoKHR surface_info = {
        .sType = STYPE(XCB_SURFACE_CREATE_INFO_KHR),
        .connection = window->connection,
        .window = window->window
    };
    VkResult result = vkCreateXcbSurfaceKHR(renderer.vk.inst, &surface_info, NULL, &renderer.vk.surface);
    if (result != VK_SUCCESS) {
        // Consistency fix: use Printfln + VkResultStr like the Win32 path
        // (the old Printf("%d") printed a raw enum with no newline).
        Printfln("Unable to create surface: %s", VkResultStr(result));
    }
    return result == VK_SUCCESS;
}
#elif _WIN32
// Creates the Win32 window surface the swapchain presents to.
static b32 CreateSurface()
{
    b32 success = true;
    PlatformWindow *window = GetPlatformWindow();
    VkWin32SurfaceCreateInfoKHR surface_info = {
        .sType = STYPE(WIN32_SURFACE_CREATE_INFO_KHR),
        .hinstance = window->instance,
        .hwnd = window->handle,
    };
    VkResult result = vkCreateWin32SurfaceKHR(renderer.vk.inst, &surface_info, NULL, &renderer.vk.surface);
    if (result != VK_SUCCESS)
    {
        Printfln("Unable to create surface: %s", VkResultStr(result));
        success = false;
    }
    return success;
}
#endif
static b32 LoadVulkanLib()
{
Library *lib = &renderer.vk.lib;
b32 lib_found; Function fn;
for (i32 i = 0; i < Len(vulkan_libs); i++) {
lib_found = LoadLib(vulkan_libs[i], lib);
if (lib_found) {
lib_found = LoadFn("vkGetInstanceProcAddr", lib, &fn);
vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)fn.fn;
break;
}
}
return lib_found;
}
static b32 CreateFrameStructures()
{
b32 success = true;
FrameStructures *data = &renderer.vk.frame;
u32 img_count = renderer.vk.sc.img_count;
pool_create_info.queueFamilyIndex = renderer.vk.queues.graphics;
renderer.vk.frame.pools = MakeArray(renderer.perm_arena, VkCommandPool, img_count);
renderer.vk.frame.buffers = MakeArray(renderer.perm_arena, VkCommandBuffer, img_count);
renderer.vk.frame.swapchain_sems = MakeArray(renderer.perm_arena, VkSemaphore, img_count);
renderer.vk.frame.render_sems = MakeArray(renderer.perm_arena, VkSemaphore, img_count);
renderer.vk.frame.render_fences = MakeArray(renderer.perm_arena, VkFence, img_count);
renderer.vk.frame.buffer_destroy_queues = MakeArray(renderer.perm_arena, RenderBuffer *, FRAME_OVERLAP);
renderer.vk.frame.buffer_counts = MakeArray(renderer.perm_arena, u32, FRAME_OVERLAP);
for (u32 i = 0; i < FRAME_OVERLAP; i++)
{
VkResult result;
VkDevice device = renderer.vk.device;
result = vkCreateCommandPool(device, &pool_create_info, NULL, &data->pools[i]);
if (result != VK_SUCCESS)
success = false;
cmd_buf_info.commandPool = data->pools[i];
result = vkAllocateCommandBuffers(device, &cmd_buf_info, &data->buffers[i]);
if (result != VK_SUCCESS)
success = false;
result = vkCreateFence(device, &fence_create_info, NULL, &data->render_fences[i]);
if (result != VK_SUCCESS)
success = false;
result = vkCreateSemaphore(device, &semaphore_create_info, NULL, &data->render_sems[i]);
if (result != VK_SUCCESS)
success = false;
result = vkCreateSemaphore(device, &semaphore_create_info, NULL, &data->swapchain_sems[i]);
if (result != VK_SUCCESS)
success = false;
renderer.vk.frame.buffer_destroy_queues[i] = ArenaAlloc(renderer.perm_arena, sizeof(RenderBuffer) * 64);
}
return success;
}
// Creates the command pools/buffers and fences used for immediate (blocking)
// submissions on the transfer queue — one set per available worker thread.
static b32 CreateImmediateStructures()
{
b32 success = true;
VkResult result;
VkDevice device = renderer.vk.device;
ImmediateStructures *imm = &renderer.vk.imm;
pool_create_info.queueFamilyIndex = renderer.vk.queues.transfer;
// NOTE(review): this collapses avail_threads to either 1 or 0. When it
// becomes 0, the pools/cmds/fences arrays below are zero-length, yet
// CreateVkSampler indexes imm->cmds[thread_idx] unconditionally — confirm
// callers guarantee avail_threads > 0 (or that this path is unreachable
// with a shared queue). The >= 4 threshold's intent is also unclear.
if (renderer.vk_conf.avail_threads >= 4 && !renderer.vk.queues.single_queue)
renderer.vk_conf.avail_threads = 1;
else
renderer.vk_conf.avail_threads = 0;
imm->pools = ArenaAlloc(renderer.perm_arena, sizeof(VkCommandPool) * renderer.vk_conf.avail_threads);
imm->cmds = ArenaAlloc(renderer.perm_arena, sizeof(VkCommandBuffer) * renderer.vk_conf.avail_threads);
imm->fences = ArenaAlloc(renderer.perm_arena, sizeof(VkFence) * renderer.vk_conf.avail_threads);
imm->queued_buffers = ArenaAlloc(renderer.perm_arena, sizeof(RenderBuffer) * BUFFER_QUEUE_LEN);
imm->data = ArenaAlloc(renderer.perm_arena, sizeof(void *) * BUFFER_QUEUE_LEN);
// One pool + primary command buffer + fence per worker thread; stop early
// on the first failure.
for (u32 i = 0; i < renderer.vk_conf.avail_threads && success; i++)
{
result = vkCreateCommandPool(device, &pool_create_info, NULL, &imm->pools[i]);
if (result != VK_SUCCESS)
success = false;
cmd_buf_info.commandPool = imm->pools[i];
result = vkAllocateCommandBuffers(device, &cmd_buf_info, &imm->cmds[i]);
if (result != VK_SUCCESS)
success = false;
result = vkCreateFence(device, &fence_create_info, NULL, &imm->fences[i]);
if (result != VK_SUCCESS)
success = false;
}
return success;
}
static b32 CreateSwapchain()
{
b32 success = true;
VkPhysicalDevice phys_device = renderer.vk.phys_device;
VkDevice device = renderer.vk.device;
VkSurfaceKHR surface = renderer.vk.surface;
VkPresentModeKHR present_mode = renderer.vk.sc.present_mode;
VkFormat format = renderer.vk.sc.format;
VkColorSpaceKHR color_space = renderer.vk.sc.color_space;
VkSurfaceCapabilitiesKHR capabilities;
vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities);
// Maybe reconsider handling window sizing within here and only handle it from events themselves
// causes issues when the window size is out of sync with the current swapchain
VkExtent2D extent;
u32 width = renderer.vk.sc.extent.width;
u32 height = renderer.vk.sc.extent.height;
extent.width = Clamp((u32)width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width);
extent.height = Clamp((u32)height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height);
if (present_mode == INT_MAX || format == INT_MAX || color_space == INT_MAX)
{
u32 format_count;
vkGetPhysicalDeviceSurfaceFormatsKHR(phys_device, surface, &format_count, NULL);
VkSurfaceFormatKHR *formats = ArenaAlloc(renderer.arena, sizeof(VkSurfaceFormatKHR) * format_count);
vkGetPhysicalDeviceSurfaceFormatsKHR(phys_device, surface, &format_count, formats);
format = formats[0].format;
color_space = formats[0].colorSpace;
u32 present_count;
vkGetPhysicalDeviceSurfacePresentModesKHR(phys_device, surface, &present_count, NULL);
VkPresentModeKHR *present_modes = ArenaAlloc(renderer.arena, sizeof(VkSurfaceFormatKHR) * present_count);
vkGetPhysicalDeviceSurfacePresentModesKHR(phys_device, surface, &present_count, present_modes);
for (u32 i = 0; i < present_count; i++)
{
if (present_modes[i] == VK_PRESENT_MODE_MAILBOX_KHR)
{
present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
break;
}
}
if (present_mode != VK_PRESENT_MODE_MAILBOX_KHR)
present_mode = VK_PRESENT_MODE_FIFO_KHR;
}
swapchain_create_info.minImageCount = capabilities.minImageCount + 1;
swapchain_create_info.surface = surface;
swapchain_create_info.imageFormat = format;
swapchain_create_info.imageColorSpace = color_space;
swapchain_create_info.imageExtent = extent;
swapchain_create_info.preTransform = capabilities.currentTransform;
swapchain_create_info.presentMode = present_mode;
VkResult result;
result = vkCreateSwapchainKHR(device, &swapchain_create_info, NULL, &renderer.vk.swapchain);
if (result != VK_SUCCESS)
success = false;
u32 image_count;
vkGetSwapchainImagesKHR(device, renderer.vk.swapchain, &image_count, NULL);
VkImage *sc_images = ArenaAlloc(renderer.perm_arena, sizeof(VkImage) * image_count);
VkImageView *sc_views = ArenaAlloc(renderer.perm_arena, sizeof(VkImageView) * image_count);
vkGetSwapchainImagesKHR(device, renderer.vk.swapchain, &image_count, sc_images);
for (u32 i = 0; i < image_count; i++)
{
sc_image_view_create_info.image = sc_images[i];
sc_image_view_create_info.format = format;
result = vkCreateImageView(device, &sc_image_view_create_info, NULL, &sc_views[i]);
if (result != VK_SUCCESS)
success = false;
}
renderer.vk.sc.format = format;
renderer.vk.sc.color_space = color_space;
renderer.vk.sc.present_mode = present_mode;
renderer.vk.sc.imgs = sc_images;
renderer.vk.sc.views = sc_views;
renderer.vk.sc.img_count = image_count;
renderer.vk.sc.extent.width = extent.width;
renderer.vk.sc.extent.height = extent.height;
renderer.vk.sc.extent.depth = 1;
return success;
}
// Creates the off-screen draw (color) and depth images sized to the current
// swapchain extent, plus their views.
// BUG FIX: each step is now guarded on the previous one succeeding — the old
// code created views from handles that may never have been created.
static b32 CreateDrawImages()
{
    b32 success = true;
    VkResult result;
    VkFormat image_format = GetImageFormat();
    VkExtent3D extent = renderer.vk.sc.extent;
    VkDevice device = renderer.vk.device;
    VmaAllocationCreateInfo alloc_create_info = {
        .usage = VMA_MEMORY_USAGE_GPU_ONLY,
        .requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
    };
    // Draw Image
    draw_image_create_info.format = image_format;
    draw_image_create_info.extent = extent;
    result = vmaCreateImage(renderer.vk.alloc, &draw_image_create_info,
        &alloc_create_info, &renderer.vk.sc.draw_img.img, &renderer.vk.sc.draw_img.alloc, NULL);
    if (result != VK_SUCCESS)
        success = false;
    // Draw Image View
    if (success)
    {
        draw_image_view_create_info.image = renderer.vk.sc.draw_img.img;
        draw_image_view_create_info.format = image_format;
        result = vkCreateImageView(device, &draw_image_view_create_info, NULL, &renderer.vk.sc.draw_img.view);
        if (result != VK_SUCCESS)
            success = false;
    }
    // Depth Image
    if (success)
    {
        depth_image_create_info.extent = extent;
        result = vmaCreateImage(renderer.vk.alloc, &depth_image_create_info,
            &alloc_create_info, &renderer.vk.sc.depth_img.img, &renderer.vk.sc.depth_img.alloc, NULL);
        if (result != VK_SUCCESS)
            success = false;
    }
    // Depth Image View
    if (success)
    {
        depth_image_view_create_info.image = renderer.vk.sc.depth_img.img;
        result = vkCreateImageView(device, &depth_image_view_create_info, NULL, &renderer.vk.sc.depth_img.view);
        if (result != VK_SUCCESS)
            success = false;
    }
    // Cache format/layout so TransitionImage tracks these images correctly.
    renderer.vk.sc.extent = extent;
    renderer.vk.sc.depth_img.fmt = depth_image_create_info.format;
    renderer.vk.sc.depth_img.curr_layout = VK_IMAGE_LAYOUT_UNDEFINED;
    renderer.vk.sc.draw_img.fmt = draw_image_create_info.format;
    renderer.vk.sc.draw_img.curr_layout = VK_IMAGE_LAYOUT_UNDEFINED;
    return success;
}
static VkFormat GetImageFormat()
{
VkPhysicalDevice phys_device = renderer.vk.phys_device;
VkImageType image_type = VK_IMAGE_TYPE_2D;
VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
VkImageUsageFlags usage_flags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_STORAGE_BIT |
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
VkFormat format = 0;
for (i32 i = 0; i < Len(VK_IMAGE_FORMATS); i++)
{
VkImageFormatProperties properties;
VkResult result;
result = vkGetPhysicalDeviceImageFormatProperties(phys_device, VK_IMAGE_FORMATS[i], image_type,
tiling, usage_flags, 0, &properties);
if (result == VK_ERROR_FORMAT_NOT_SUPPORTED)
continue;
if (result == VK_SUCCESS)
{
format = VK_IMAGE_FORMATS[i];
break;
}
}
Assert(format != 0, "[Error] unable to find appropriate image format");
return format;
}
// Creates the descriptor pool, the shared + bindless set layouts, allocates
// the descriptor sets, builds the single pipeline layout, and initializes the
// per-descriptor-type binding free lists.
// Relies on file-scope create-info templates (desc_pool_info,
// shared_layout_create_info, bindless_layout_binding/bindless_layout_create_info,
// set_allocate_info, pipeline_layout_create_info) which are mutated in place
// here, so this function is order-dependent and not reentrant.
// Returns false if any Vulkan call fails; later steps are still attempted.
static b32 CreateDescriptors()
{
    b32 success = true;
    renderer.vk.pipe.bindings = ArenaAlloc(renderer.perm_arena, sizeof(DescBindings) * DESC_TYPE_MAX);
    VkDevice device = renderer.vk.device;
    VkResult result;
    DescBindings *bindings = renderer.vk.pipe.bindings;
    result = vkCreateDescriptorPool(device, &desc_pool_info, NULL, &renderer.vk.pipe.pool);
    if (result != VK_SUCCESS)
        success = false;
    // The shared (non-bindless) layout has its own template.
    result = vkCreateDescriptorSetLayout(device, &shared_layout_create_info, NULL, &renderer.vk.pipe.layouts[DESC_TYPE_SHARED]);
    if (result != VK_SUCCESS)
        success = false;
    // One bindless layout per descriptor type: the shared binding template is
    // re-pointed at each type before creating that type's layout.
    for (u32 i = DESC_TYPE_COMBINED_SAMPLER; i < DESC_TYPE_MAX; i++)
    {
        bindless_layout_binding.descriptorType = desc_type_map[i];
        result = vkCreateDescriptorSetLayout(device, &bindless_layout_create_info, NULL, &renderer.vk.pipe.layouts[i]);
        if (result != VK_SUCCESS)
            success = false;
    }
    // One set per layout, all from the pool created above.
    set_allocate_info.descriptorPool = renderer.vk.pipe.pool;
    set_allocate_info.pSetLayouts = renderer.vk.pipe.layouts;
    result = vkAllocateDescriptorSets(device, &set_allocate_info, renderer.vk.pipe.sets);
    if (result != VK_SUCCESS)
        success = false;
    // Single pipeline layout shared by every pipeline in the renderer.
    pipeline_layout_create_info.setLayoutCount = DESC_TYPE_MAX;
    pipeline_layout_create_info.pSetLayouts = renderer.vk.pipe.layouts;
    result = vkCreatePipelineLayout(device, &pipeline_layout_create_info, NULL, &renderer.vk.pipe.pipeline_layout);
    if (result != VK_SUCCESS)
        success = false;
    // Binding bookkeeping: per type, a free/used slot stack plus a lookup
    // table (2^6 buckets).
    for (u32 i = 0; i < DESC_TYPE_MAX; i++)
    {
        bindings[i].free = ArenaAlloc(renderer.perm_arena, sizeof(u16) * DESC_MAX_BINDINGS);
        bindings[i].used = ArenaAlloc(renderer.perm_arena, sizeof(u16) * DESC_MAX_BINDINGS);
        HashTableInit(&bindings[i].lookup_table, 6);
        u16 free_count = 0;
        // Fill the free stack so free[DESC_MAX_BINDINGS-1] == 0 and
        // free[0] == DESC_MAX_BINDINGS-1: popping from the top hands out the
        // lowest slot indices first.
        for (i32 j = DESC_MAX_BINDINGS-1; j >= 0; j--)
        {
            bindings[i].free[j] = free_count++;
        }
        bindings[i].free_count = free_count;
    }
    return success;
}
static b32 CreatePipelines()
{
b32 success = true;
VkResult result;
VkDevice device = renderer.vk.device;
Asset quad_vert_shader = AssetPackLoadShader(QUAD_VERT_SPIRV_SHADER);
Asset quad_frag_shader = AssetPackLoadShader(QUAD_FRAG_SPIRV_SHADER);
VkShaderModule cube_vert, cube_frag;
success &= CreateShaderModule(quad_vert_shader.bytes, quad_vert_shader.len, &cube_vert);
success &= CreateShaderModule(quad_frag_shader.bytes, quad_frag_shader.len, &cube_frag);
VkPipelineRenderingCreateInfo pipeline_render_info = {
.sType = STYPE(PIPELINE_RENDERING_CREATE_INFO),
.colorAttachmentCount = 1,
.pColorAttachmentFormats = &renderer.vk.sc.draw_img.fmt,
.depthAttachmentFormat = renderer.vk.sc.depth_img.fmt,
};
VkPipelineShaderStageCreateInfo cube_shader_stages[] = {
{
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
.stage = VK_SHADER_STAGE_VERTEX_BIT,
.module = cube_vert,
.pName = "main",
},
{
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
.module = cube_frag,
.pName = "main",
},
};
cube_create_info.pStages = cube_shader_stages;
cube_create_info.stageCount = Len(cube_shader_stages);
cube_create_info.layout = renderer.vk.pipe.pipeline_layout;
cube_create_info.pNext = &pipeline_render_info;
result = vkCreateGraphicsPipelines(device, 0, 1, &cube_create_info, NULL, &renderer.vk.pipe.pipelines[PIPELINE_CUBE]);
if (result != VK_SUCCESS)
{
Printf("vkCreateGraphicsPipelines failure: %s", VkResultStr(result));
success = false;
}
vkDestroyShaderModule(device, cube_vert, NULL);
vkDestroyShaderModule(device, cube_frag, NULL);
AssetPackUnloadShader(quad_vert_shader);
AssetPackUnloadShader(quad_frag_shader);
Asset gui_vert_shader = AssetPackLoadShader(GUI_VERT_SPIRV_SHADER);
Asset gui_frag_shader = AssetPackLoadShader(GUI_FRAG_SPIRV_SHADER);
VkShaderModule gui_vert, gui_frag;
success &= CreateShaderModule(gui_vert_shader.bytes, gui_vert_shader.len, &gui_vert);
success &= CreateShaderModule(gui_frag_shader.bytes, gui_frag_shader.len, &gui_frag);
VkPipelineShaderStageCreateInfo gui_shader_stages[] = {
{
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
.stage = VK_SHADER_STAGE_VERTEX_BIT,
.module = gui_vert,
.pName = "main",
},
{
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
.module = gui_frag,
.pName = "main",
},
};
gui_create_info.pStages = gui_shader_stages;
gui_create_info.stageCount = Len(gui_shader_stages);
gui_create_info.layout = renderer.vk.pipe.pipeline_layout;
gui_create_info.pNext = &pipeline_render_info;
result = vkCreateGraphicsPipelines(device, 0, 1, &gui_create_info, NULL, &renderer.vk.pipe.pipelines[PIPELINE_GUI]);
if (result != VK_SUCCESS)
{
Printfln("vkCreateGraphicsPipelines failure: %s", VkResultStr(result));
success = false;
}
vkDestroyShaderModule(device, gui_vert, NULL);
vkDestroyShaderModule(device, gui_frag, NULL);
AssetPackUnloadShader(gui_vert_shader);
AssetPackUnloadShader(gui_frag_shader);
return success;
}
// Wraps a SPIR-V blob in a VkShaderModule.
// bytes must point to 4-byte-aligned data and len must be a multiple of 4
// (Vulkan requirement for pCode/codeSize) — presumably guaranteed by the
// asset pack; TODO confirm.
// Returns false (and logs) on failure; *module is not written then.
static b32 CreateShaderModule(u8 *bytes, u32 len, VkShaderModule *module)
{
    VkResult result;
    b32 success = true;
    VkShaderModuleCreateInfo module_info = {
        .sType = STYPE(SHADER_MODULE_CREATE_INFO),
        .codeSize = len,
        .pCode = (u32 *)bytes,
    };
    result = vkCreateShaderModule(renderer.vk.device, &module_info, NULL, module);
    if (result != VK_SUCCESS)
    {
        // Printfln (not Printf) for consistency with the file's other log lines.
        Printfln("vkCreateShaderModule failure: %s", VkResultStr(result));
        success = false;
    }
    return success;
}
#if __linux__
// Spawns one detachedly-running loader thread per available worker, each
// executing VkLoaderStart. Thread handles live in the permanent arena; each
// thread receives a stable pointer into the static vk_thread_indices table.
static void StartVkLoaderThreads()
{
    u32 count = renderer.vk_conf.avail_threads;
    if (count == 0)
        return;
    // vk_thread_indices has a fixed capacity; clamp so an over-large
    // avail_threads cannot overrun the table (previously unchecked).
    if (count > Len(vk_thread_indices))
        count = Len(vk_thread_indices);
    pthread_t *threads = ArenaAlloc(renderer.perm_arena, sizeof(pthread_t) * count);
    for (u32 i = 0; i < count; i++)
    {
        vk_thread_indices[i] = i;
        pthread_create(&threads[i], NULL, VkLoaderStart, &vk_thread_indices[i]);
    }
}
#elif _WIN32
#	error Threading not yet implemented
#endif
// ::Vulkan::Init::Functions::End::
// ::Vulkan::Async::Functions::Start::
#ifdef __linux__
void *VkLoaderStart(void *i)
{
u32 index = *(u32 *)i;
pthread_t self = pthread_self();
pthread_mutex_t mut;
pthread_mutex_init(&mut, NULL);
for (;;)
{
TicketMutLock(&renderer.vk.imm.mut);
u32 job_count = JobQueueGetCount(&renderer.vk.imm.queue);
if (job_count < 0)
{
TicketMutUnlock(&renderer.vk.imm.mut);
pthread_exit(NULL);
}
else if (job_count == 0)
{
TicketMutUnlock(&renderer.vk.imm.mut);
pthread_mutex_lock(&mut);
pthread_cond_wait(&cond, &mut);
pthread_mutex_unlock(&mut);
}
else
{
RenderBuffer *buffers[16];
rawptr data[16];
u32 count = 0;
for (u32 i = 0; i < job_count && i < 16; i++)
{
buffers[i] = renderer.vk.imm.queued_buffers[i];
data[i] = renderer.vk.imm.data[i];
count += 1;
}
JobQueueMarkUnqueued(&renderer.vk.imm.queue, count);
TicketMutUnlock(&renderer.vk.imm.mut);
for (u32 i = 0; i < count; i++)
Assert(CreateBuffer(buffers[i]), "VkLoader CreateBuffer failure");
Assert(UploadToBuffer(buffers, data, count, index), "VkLoader UploadToBuffer failure");
JobQueueMarkCompleted(&renderer.vk.imm.queue, count);
}
}
}
// Wakes all sleeping loader threads after new jobs are queued.
// One broadcast replaces the old loop of avail_threads signals: it wakes
// every current waiter in a single call regardless of how many are blocked.
void VkLoaderWake()
{
    if (renderer.vk_conf.avail_threads > 0)
        pthread_cond_broadcast(&cond);
}
#elif _WIN32
#error not yet implemented
#endif
// ::Vulkan::Async::Functions::End::
// ::Vulkan::CleanUp::Functions::Start::
static void DestroySwapchain()
{
for (u32 i = 0; i < renderer.vk.sc.img_count; i++)
{
vkDestroyImageView(renderer.vk.device, renderer.vk.sc.views[i], NULL);
}
vkDestroySwapchainKHR(renderer.vk.device, renderer.vk.swapchain, NULL);
}
// Releases the off-screen render targets: for each of the color (draw) and
// depth images, the view is destroyed first, then the VMA-backed image.
static void DestroyDrawImages()
{
    VkDevice device = renderer.vk.device;
    VmaAllocator allocator = renderer.vk.alloc;
    SwapchainStructures *sc = &renderer.vk.sc;
    // Color target.
    vkDestroyImageView(device, sc->draw_img.view, NULL);
    vmaDestroyImage(allocator, sc->draw_img.img, sc->draw_img.alloc);
    // Depth target.
    vkDestroyImageView(device, sc->depth_img.view, NULL);
    vmaDestroyImage(allocator, sc->depth_img.img, sc->depth_img.alloc);
}
// ::Vulkan::CleanUp::Functions::End::
// ::Vulkan::Logging::Functions::Start::
// Logs an informational message with an [INFO] prefix.
void VkInfo(const char *str)
{
    Printfln("[INFO] %s", str);
}
// Logs a warning message with a [WARN] prefix.
void VkWarn(const char *str)
{
    Printfln("[WARN] %s", str);
}
// Logs an error message with an [ERROR] prefix.
void VkError(const char *str)
{
    Printfln("[ERROR] %s", str);
}
// ::Vulkan::Logging::Functions::End::
// ::Vulkan::Debug::Functions::Start::
// Maps a VkResult to its enumerator name for logging; unrecognized values
// (including results from extensions not listed here) map to "??????".
// Mechanical mapping — keep in sync with the Vulkan headers in use.
const char *VkResultStr(VkResult result)
{
    switch (result)
    {
    case VK_SUCCESS:
        return "VK_SUCCESS";
    case VK_NOT_READY:
        return "VK_NOT_READY";
    case VK_TIMEOUT:
        return "VK_TIMEOUT";
    case VK_EVENT_SET:
        return "VK_EVENT_SET";
    case VK_EVENT_RESET:
        return "VK_EVENT_RESET";
    case VK_INCOMPLETE:
        return "VK_INCOMPLETE";
    case VK_ERROR_OUT_OF_HOST_MEMORY:
        return "VK_ERROR_OUT_OF_HOST_MEMORY";
    case VK_ERROR_OUT_OF_DEVICE_MEMORY:
        return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
    case VK_ERROR_INITIALIZATION_FAILED:
        return "VK_ERROR_INITIALIZATION_FAILED";
    case VK_ERROR_DEVICE_LOST:
        return "VK_ERROR_DEVICE_LOST";
    case VK_ERROR_MEMORY_MAP_FAILED:
        return "VK_ERROR_MEMORY_MAP_FAILED";
    case VK_ERROR_LAYER_NOT_PRESENT:
        return "VK_ERROR_LAYER_NOT_PRESENT";
    case VK_ERROR_EXTENSION_NOT_PRESENT:
        return "VK_ERROR_EXTENSION_NOT_PRESENT";
    case VK_ERROR_FEATURE_NOT_PRESENT:
        return "VK_ERROR_FEATURE_NOT_PRESENT";
    case VK_ERROR_INCOMPATIBLE_DRIVER:
        return "VK_ERROR_INCOMPATIBLE_DRIVER";
    case VK_ERROR_TOO_MANY_OBJECTS:
        return "VK_ERROR_TOO_MANY_OBJECTS";
    case VK_ERROR_FORMAT_NOT_SUPPORTED:
        return "VK_ERROR_FORMAT_NOT_SUPPORTED";
    case VK_ERROR_FRAGMENTED_POOL:
        return "VK_ERROR_FRAGMENTED_POOL";
    case VK_ERROR_UNKNOWN:
        return "VK_ERROR_UNKNOWN";
    case VK_ERROR_OUT_OF_POOL_MEMORY:
        return "VK_ERROR_OUT_OF_POOL_MEMORY";
    case VK_ERROR_INVALID_EXTERNAL_HANDLE:
        return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
    case VK_ERROR_FRAGMENTATION:
        return "VK_ERROR_FRAGMENTATION";
    case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS:
        return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS";
    case VK_PIPELINE_COMPILE_REQUIRED:
        return "VK_PIPELINE_COMPILE_REQUIRED";
    case VK_ERROR_SURFACE_LOST_KHR:
        return "VK_ERROR_SURFACE_LOST_KHR";
    case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
        return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
    case VK_SUBOPTIMAL_KHR:
        return "VK_SUBOPTIMAL_KHR";
    case VK_ERROR_OUT_OF_DATE_KHR:
        return "VK_ERROR_OUT_OF_DATE_KHR";
    case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
        return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
    case VK_ERROR_VALIDATION_FAILED_EXT:
        return "VK_ERROR_VALIDATION_FAILED_EXT";
    case VK_ERROR_INVALID_SHADER_NV:
        return "VK_ERROR_INVALID_SHADER_NV";
    // Video/beta results only exist when beta extensions are enabled in the
    // headers; each case is guarded individually.
#ifdef VK_ENABLE_BETA_EXTENSIONS
    case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR:
        return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR";
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
    case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR:
        return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR";
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
    case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR:
        return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR";
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
    case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR:
        return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR";
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
    case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR:
        return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR";
#endif
#ifdef VK_ENABLE_BETA_EXTENSIONS
    case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR:
        return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR";
#endif
    case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
        return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
    case VK_ERROR_NOT_PERMITTED_KHR:
        return "VK_ERROR_NOT_PERMITTED_KHR";
    case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
        return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
    case VK_THREAD_IDLE_KHR:
        return "VK_THREAD_IDLE_KHR";
    case VK_THREAD_DONE_KHR:
        return "VK_THREAD_DONE_KHR";
    case VK_OPERATION_DEFERRED_KHR:
        return "VK_OPERATION_DEFERRED_KHR";
    case VK_OPERATION_NOT_DEFERRED_KHR:
        return "VK_OPERATION_NOT_DEFERRED_KHR";
    case VK_ERROR_COMPRESSION_EXHAUSTED_EXT:
        return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT";
    case VK_RESULT_MAX_ENUM:
        return "VK_RESULT_MAX_ENUM";
    default:
        return "??????";
    }
}
static VKAPI_ATTR VkBool32 DebugCallback(
VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
VkDebugUtilsMessageTypeFlagsEXT message_type,
const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
void *pUserData
) {
char *ms, *mt;
switch (message_severity) {
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
ms = (char *)"VERBOSE";
break;
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
ms = (char *)"INFO";
break;
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
ms = (char *)"WARNING";
break;
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
ms = (char *)"ERROR";
break;
default:
ms = (char *)"UNKNOWN";
break;
}
switch (message_type) {
case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
mt = (char *)"General";
break;
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT:
mt = (char *)"Validation";
break;
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
mt = (char *)"Validation | General";
break;
case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
mt = (char *)"Performance";
break;
case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
mt = (char *)"General | Performance";
break;
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
mt = (char *)"Validation | Performance";
break;
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
mt = (char *)"General | Validation | Performance";
break;
default:
mt = (char *)"Unknown";
break;
}
Printf("[%s: %s]\n%s\n", ms, mt, pCallbackData->pMessage);
return VK_FALSE;
}
// Reports whether the validation layer named by _VK_VALIDATION is present
// among the instance layers the loader enumerates. The properties array is
// allocated from the frame/scratch arena.
static b32 VLayersSupported()
{
    u32 count;
    vkEnumerateInstanceLayerProperties(&count, NULL);
    Assert(count, "VLayersSupported(): vkEnumerateInstanceLayerProperties returned a count of 0");
    VkLayerProperties *layers = ArenaAlloc(renderer.arena, sizeof(VkLayerProperties) * count);
    vkEnumerateInstanceLayerProperties(&count, layers);
    b32 found = false;
    for (u32 idx = 0; idx < count && !found; idx++)
        found = StrEq(layers[idx].layerName, _VK_VALIDATION);
    return found;
}
// ::Vulkan::Debug::Functions::End::