2442 lines
64 KiB
C
2442 lines
64 KiB
C
// ::Vulkan::Globals::Start::
|
|
|
|
// Global renderer state, zero-initialized except for "not yet chosen"
// sentinels: queue family indices start at UINT32_MAX, and the swapchain
// format/color space/present mode start at INT_MAX.
static vRenderer v_Renderer = {
    .state = {
        .vk = {
            .gfx_queue_idx = UINT32_MAX,  // no graphics queue family selected yet
            .tfer_queue_idx = UINT32_MAX, // no transfer queue family selected yet
        },
        .swapchain = {
            .format = INT_MAX,       // VkFormat sentinel: not chosen
            .color_space = INT_MAX,  // VkColorSpaceKHR sentinel: not chosen
            .present_mode = INT_MAX, // VkPresentModeKHR sentinel: not chosen
        },
    },
};
|
|
|
|
#ifdef BUILD_DEBUG
|
|
|
|
RENDERDOC_API_1_1_2 *v_rdoc_api = NULL;
|
|
b32 v_rdoc_captured = false;
|
|
|
|
#endif
|
|
|
|
// ::Vulkan::Globals::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Includes::CFiles::Start::
|
|
|
|
#include "renderer_vulkan_public.c"
|
|
|
|
// ::Vulkan::Includes::CFiles::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Util::Functions::Start::
|
|
|
|
// Index of the in-flight frame slot (0..FRAME_OVERLAP-1) currently
// being recorded, derived from the running frame counter.
static inline u32
vFrameIndex()
{
    return v_Renderer.state.renderer.frame_count % FRAME_OVERLAP;
}
|
|
|
|
// Index of the frame slot that will be used on the NEXT frame — used to
// defer work (e.g. destruction) until that slot's resources are idle.
static inline u32
vFrameNextIndex()
{
    return (v_Renderer.state.renderer.frame_count + 1) % FRAME_OVERLAP;
}
|
|
|
|
static inline VkCommandBuffer
|
|
vFrameCmdBuf()
|
|
{
|
|
return v_Renderer.frame_handles[vFrameIndex()].buffer;
|
|
}
|
|
|
|
static inline VkFence
|
|
vFrameRenderFence()
|
|
{
|
|
return v_Renderer.frame_handles[vFrameIndex()].r_fence;
|
|
}
|
|
|
|
// Swapchain image for the most recently acquired slot
// (state.vk.image_idx) — the presentation target for this frame.
static inline VkImage
vFrameImage()
{
    return v_Renderer.images.sc.data[v_Renderer.state.vk.image_idx].image.image;
}
|
|
|
|
static inline Arena *
|
|
vFrameArena()
|
|
{
|
|
return v_Renderer.mem.frame_arenas[vFrameIndex()];
|
|
}
|
|
|
|
static inline VkSemaphore
|
|
vFrameRenderSem()
|
|
{
|
|
return v_Renderer.frame_handles[vFrameIndex()].r_sem;
|
|
}
|
|
|
|
static inline VkSemaphore
|
|
vFrameSwapSem()
|
|
{
|
|
return v_Renderer.frame_handles[vFrameIndex()].sc_sem;
|
|
}
|
|
|
|
static inline vBufferPtrArray *
|
|
vFrameBuffers()
|
|
{
|
|
return v_Renderer.buffers.frame_buffers + vFrameIndex();
|
|
}
|
|
|
|
static inline b8 *
|
|
vFrameTexDestroyQueue()
|
|
{
|
|
return v_Renderer.buffers.tex_destroy_queue.data[vFrameIndex()];
|
|
}
|
|
|
|
static inline b8 *
|
|
vFrameNextTexDestroyQueue()
|
|
{
|
|
return v_Renderer.buffers.tex_destroy_queue.data[vFrameNextIndex()];
|
|
}
|
|
|
|
static inline void
|
|
vImageCopyToImage(VkCommandBuffer cmd, VkImage src, VkImage dst, VkExtent2D src_ext, VkExtent2D dst_ext)
|
|
{
|
|
VkImageBlit2 blit = {
|
|
.sType = STYPE(IMAGE_BLIT_2),
|
|
.srcOffsets = {
|
|
{0},
|
|
{ .x = (i32)src_ext.width, .y = (i32)src_ext.height, .z = 1 },
|
|
},
|
|
.dstOffsets = {
|
|
{0},
|
|
{ .x = (i32)dst_ext.width, .y = (i32)dst_ext.height, .z = 1 },
|
|
},
|
|
.srcSubresource = {
|
|
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
|
|
.baseArrayLayer = 0,
|
|
.layerCount = 1,
|
|
.mipLevel = 0,
|
|
},
|
|
.dstSubresource = {
|
|
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
|
|
.baseArrayLayer = 0,
|
|
.layerCount = 1,
|
|
.mipLevel = 0,
|
|
},
|
|
};
|
|
|
|
VkBlitImageInfo2 blit_info = {
|
|
.sType = STYPE(BLIT_IMAGE_INFO_2),
|
|
.srcImage = src,
|
|
.srcImageLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
|
|
.dstImage = dst,
|
|
.dstImageLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
|
|
.filter = VK_FILTER_LINEAR,
|
|
.regionCount = 1,
|
|
.pRegions = &blit,
|
|
};
|
|
|
|
vkCmdBlitImage2(cmd, &blit_info);
|
|
}
|
|
|
|
static inline void
|
|
vImageTransitionLayout(VkCommandBuffer cmd, VkImage img, VkImageLayout curr, VkImageLayout new)
|
|
{
|
|
VkImageMemoryBarrier2 barrier = {
|
|
.sType = STYPE(IMAGE_MEMORY_BARRIER_2),
|
|
.srcStageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
|
|
.srcAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT,
|
|
.dstStageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
|
|
.dstAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT | VK_ACCESS_2_MEMORY_READ_BIT,
|
|
.oldLayout = curr,
|
|
.newLayout = new,
|
|
.image = img,
|
|
.subresourceRange = {
|
|
.aspectMask = new == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT,
|
|
.baseMipLevel = 0,
|
|
.levelCount = VK_REMAINING_MIP_LEVELS,
|
|
.baseArrayLayer = 0,
|
|
.layerCount = VK_REMAINING_ARRAY_LAYERS,
|
|
},
|
|
};
|
|
|
|
VkDependencyInfo dep_info = {
|
|
.sType = STYPE(DEPENDENCY_INFO),
|
|
.imageMemoryBarrierCount = 1,
|
|
.pImageMemoryBarriers = &barrier,
|
|
};
|
|
|
|
vkCmdPipelineBarrier2(cmd, &dep_info);
|
|
}
|
|
|
|
static inline void
|
|
vImageTransition(VkCommandBuffer cmd, vImage *img, VkImageLayout new)
|
|
{
|
|
vImageTransitionLayout(cmd, img->image, img->layout, new);
|
|
img->layout = new;
|
|
}
|
|
|
|
// ::Vulkan::Util::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Rendering::Functions::Start::
|
|
|
|
static void
|
|
vRenderingBegin()
|
|
{
|
|
VkCommandBuffer cmd = vFrameCmdBuf();
|
|
VkImage curr_img = v_Renderer.images.sc.data[v_Renderer.state.vk.image_idx].image.image;
|
|
|
|
vImageTransition(cmd, &v_Renderer.images.draw.image, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
|
|
vImageTransition(cmd, &v_Renderer.images.depth.image, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL);
|
|
|
|
vImageTransitionLayout(cmd, curr_img, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
|
|
|
|
VkRenderingAttachmentInfo col_attach_info = {
|
|
.sType = STYPE(RENDERING_ATTACHMENT_INFO),
|
|
.imageView = v_Renderer.images.draw.view,
|
|
.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
|
|
.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
|
|
.storeOp = VK_ATTACHMENT_STORE_OP_STORE,
|
|
.clearValue = { { 0.2f, 0.2f, 0.2f, 1.0f } },
|
|
};
|
|
|
|
VkRenderingAttachmentInfo depth_attach_info = {
|
|
.sType = STYPE(RENDERING_ATTACHMENT_INFO),
|
|
.imageView = v_Renderer.images.depth.view,
|
|
.imageLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
|
|
.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
|
|
.storeOp = VK_ATTACHMENT_STORE_OP_STORE,
|
|
};
|
|
|
|
VkRenderingInfo render_info = {
|
|
.sType = STYPE(RENDERING_INFO),
|
|
.layerCount = 1,
|
|
.colorAttachmentCount = 1,
|
|
.pColorAttachments = &col_attach_info,
|
|
.pDepthAttachment = &depth_attach_info,
|
|
.renderArea = {
|
|
.extent = {
|
|
.width = v_Renderer.state.swapchain.extent.width,
|
|
.height = v_Renderer.state.swapchain.extent.height,
|
|
},
|
|
},
|
|
};
|
|
|
|
vkCmdBeginRendering(cmd, &render_info);
|
|
|
|
vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, v_Renderer.handles.pipeline_layout, 0, vDT_MAX, v_Renderer.handles.desc_sets, 0, NULL);
|
|
}
|
|
|
|
// ::Vulkan::Rendering::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::ImmediateSubmit::Functions::Start::
|
|
|
|
static b32
|
|
vImmSubmitBegin(VkDevice device, VkFence fence, VkCommandBuffer cmd)
|
|
{
|
|
b32 success = true;
|
|
|
|
VkFence f = fence;
|
|
VkResult result = vkResetFences(device, 1, &f);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkResetFences failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
if (success)
|
|
{
|
|
result = vkResetCommandBuffer(cmd, 0);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkResetCommandBuffer failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
}
|
|
|
|
if (success)
|
|
{
|
|
VkCommandBufferBeginInfo buf_info = {
|
|
.sType = STYPE(COMMAND_BUFFER_BEGIN_INFO),
|
|
.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
|
|
};
|
|
|
|
result = vkBeginCommandBuffer(cmd, &buf_info);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkBeginCommandBuffer failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
static b32
|
|
vImmSubmitFinish(VkDevice device, VkFence fence, VkCommandBuffer cmd, VkQueue queue)
|
|
{
|
|
b32 success = true;
|
|
VkFence f = fence;
|
|
|
|
VkResult result = vkEndCommandBuffer(cmd);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkEndCommandBuffer imm failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
if (success)
|
|
{
|
|
VkCommandBufferSubmitInfo cmd_submit_info = {
|
|
.sType = STYPE(COMMAND_BUFFER_SUBMIT_INFO),
|
|
.commandBuffer = cmd,
|
|
};
|
|
|
|
VkSubmitInfo2 submit_info = {
|
|
.sType = STYPE(SUBMIT_INFO_2),
|
|
.commandBufferInfoCount = 1,
|
|
.pCommandBufferInfos = &cmd_submit_info,
|
|
};
|
|
|
|
result = vkQueueSubmit2(queue, 1, &submit_info, f);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkQueueSubmit2 imm failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
}
|
|
|
|
if (success)
|
|
{
|
|
result = vkWaitForFences(device, 1, &f, true, 9999999999);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkWaitForFences imm failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
// ::Vulkan::ImmediateSubmit::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Swapchain::Functions::Start::
|
|
|
|
// Recreates the swapchain and the draw/depth images (e.g. after a window
// resize). Waits for the device to go idle first so no in-flight work
// still references the resources being destroyed.
static void
vSwapchainResize()
{
    vkDeviceWaitIdle(v_Renderer.handles.device);

    vSwapchainDestroy();

    vDrawImagesDestroy();

    // TODO: fix
    //v_Renderer.state.swapchain.extent.width = v_Renderer.state.renderer.width;
    //v_Renderer.state.swapchain.extent.height = v_Renderer.state.renderer.height;

    Assert(vSwapchainInit(), "Unable to recreate swapchain");
    Assert(vDrawImagesInit(), "Unable to recreate draw images");
}
|
|
|
|
// ::Vulkan::Swapchain::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Images::Functions::Start::
|
|
|
|
static vImageView *
|
|
vImageViewCreate(TexMeta meta)
|
|
{
|
|
vImageView *view = FLMemAlloc(sizeof(vImageView));
|
|
Assert(vImageViewInit(view, meta.w, meta.h, meta.ch), "vImageViewCreate failure: vImage");
|
|
return view;
|
|
}
|
|
|
|
static vTransfer *
|
|
vTextureTransferInit(Arena *arena, u32 asset_id, VkImage image, rawptr bytes, TexMeta *meta)
|
|
{
|
|
vTransfer *transfer = MakeArray(arena, vTransfer, 1);
|
|
|
|
transfer->type = vTT_IMAGE;
|
|
transfer->data = bytes;
|
|
transfer->w = meta->w;
|
|
transfer->h = meta->h;
|
|
transfer->ch = meta->ch;
|
|
transfer->asset_id = asset_id;
|
|
transfer->image = image;
|
|
|
|
return transfer;
|
|
}
|
|
|
|
static b32
|
|
vImageViewInit(vImageView *view, u32 width, u32 height, u32 channels)
|
|
{
|
|
b32 success = true;
|
|
|
|
VmaAllocationCreateInfo alloc_create_info = {
|
|
.usage = VMA_MEMORY_USAGE_GPU_ONLY,
|
|
.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
|
|
};
|
|
|
|
VkImageCreateInfo image_info = {
|
|
.sType = STYPE(IMAGE_CREATE_INFO),
|
|
.imageType = VK_IMAGE_TYPE_2D,
|
|
.mipLevels = 1,
|
|
.arrayLayers = 1,
|
|
.format = VK_FORMAT_R8G8B8A8_SRGB,
|
|
.tiling = VK_IMAGE_TILING_OPTIMAL,
|
|
.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
|
|
.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT,
|
|
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
|
|
.samples = VK_SAMPLE_COUNT_1_BIT,
|
|
.extent = {
|
|
.width = width,
|
|
.height = height,
|
|
.depth = 1,
|
|
},
|
|
};
|
|
|
|
if (!v_Renderer.state.vk.single_queue)
|
|
{
|
|
image_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
|
|
image_info.queueFamilyIndexCount = 2;
|
|
image_info.pQueueFamilyIndices = (u32[]){v_Renderer.state.vk.gfx_queue_idx, v_Renderer.state.vk.tfer_queue_idx};
|
|
}
|
|
|
|
VkResult result = vmaCreateImage(v_Renderer.handles.vma_alloc, &image_info, &alloc_create_info,
|
|
&view->image.image, &view->image.alloc, NULL);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vImageViewInit error: vmaCreateImage failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
VkImageViewCreateInfo view_info = {
|
|
.sType = STYPE(IMAGE_VIEW_CREATE_INFO),
|
|
.image = view->image.image,
|
|
.viewType = VK_IMAGE_VIEW_TYPE_2D,
|
|
.format = VK_FORMAT_R8G8B8A8_SRGB,
|
|
.subresourceRange = {
|
|
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
|
|
.levelCount = 1,
|
|
.layerCount = 1,
|
|
},
|
|
};
|
|
|
|
result = vkCreateImageView(v_Renderer.handles.device, &view_info, NULL, &view->view);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vImageViewInit error: vkCreateImageView failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
static void
|
|
vTextureCleanUp()
|
|
{
|
|
VkDevice device = v_Renderer.handles.device;
|
|
VmaAllocator vma_alloc = v_Renderer.handles.vma_alloc;
|
|
HashTable *table = &v_Renderer.buffers.images;
|
|
b8 *queue = vFrameTexDestroyQueue();
|
|
|
|
// NOTE: might need a mutex here at some point, check if crashes related to image access
|
|
for (u64 i = 0; i < TEXTURE_ASSET_MAX; i++)
|
|
{
|
|
if (queue[i])
|
|
{
|
|
rDescHandle handle = vDescHandlePop(vDT_SAMPLED_IMAGE, (u32)i);
|
|
vDescIndexPush(vDT_SAMPLED_IMAGE, handle.desc_index);
|
|
|
|
vImageView *view = vImagePop(i);
|
|
Assert(view != NULL, "rTextureUnload failure: value not in hash table");
|
|
|
|
vkDestroyImageView(device, view->view, NULL);
|
|
vmaDestroyImage(vma_alloc, view->image.image, view->image.alloc);
|
|
|
|
FLMemFree(view);
|
|
|
|
queue[i] = false;
|
|
}
|
|
}
|
|
|
|
pAtomicSignalFenceSeqCst();
|
|
}
|
|
|
|
static void
|
|
vImagePush(TextureAsset asset_id, vImageView *view)
|
|
{
|
|
HashTablePushU64Rawptr(&v_Renderer.buffers.images, asset_id, view);
|
|
}
|
|
|
|
static vImageView *
|
|
vImagePop(TextureAsset asset_id)
|
|
{
|
|
return (vImageView *)HashTableDeleteU64Rawptr(&v_Renderer.buffers.images, asset_id);
|
|
}
|
|
|
|
static vImageView *
|
|
vImageSearch(TextureAsset asset_id)
|
|
{
|
|
vImageView *view = NULL;
|
|
|
|
HashTable *table = &v_Renderer.buffers.images;
|
|
|
|
KeyValuePair *pair = HashTableSearchU64(table, asset_id);
|
|
if (pair != NULL)
|
|
{
|
|
view = (vImageView *)pair->value_rawptr;
|
|
}
|
|
|
|
return view;
|
|
}
|
|
|
|
// ::Vulkan::Images::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Descriptors::Functions::Start::
|
|
|
|
|
|
static void
|
|
vDescPushImageAndHandle(rDescHandle handle, vImageView *view)
|
|
{
|
|
vDescHandlePush(vDT_SAMPLED_IMAGE, handle);
|
|
vImagePush(handle.asset_id, view);
|
|
}
|
|
|
|
static void
|
|
vDescPushModelAndHandle(rDescHandle handle, vModelBuffers *buffer)
|
|
{
|
|
vDescHandlePush(vDT_MESH, handle);
|
|
vModelPush(handle.asset_id, buffer);
|
|
}
|
|
|
|
// TODO: batch descriptor writes
|
|
static u32
|
|
vDescPushImageDesc(vImageView *view)
|
|
{
|
|
u32 index = vDescIndexPop(vDT_SAMPLED_IMAGE);
|
|
|
|
VkDescriptorImageInfo image_info = {
|
|
.imageView = view->view,
|
|
.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
|
|
};
|
|
|
|
VkWriteDescriptorSet desc_write = {
|
|
.sType = STYPE(WRITE_DESCRIPTOR_SET),
|
|
.dstSet = v_Renderer.handles.desc_sets[vDT_SAMPLED_IMAGE],
|
|
.dstBinding = 0,
|
|
.descriptorCount = 1,
|
|
.dstArrayElement = index,
|
|
.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
|
|
.pImageInfo = &image_info,
|
|
};
|
|
|
|
vkUpdateDescriptorSets(v_Renderer.handles.device, 1, &desc_write, 0, NULL);
|
|
|
|
return index;
|
|
}
|
|
|
|
static u32
|
|
vDescPushMeshDesc(vMeshBuffer *buffer)
|
|
{
|
|
u32 index = vDescIndexPop(vDT_MESH);
|
|
|
|
VkDescriptorBufferInfo buffer_info = {
|
|
.buffer = buffer->uniform.buffer,
|
|
.offset = 0,
|
|
.range = VK_WHOLE_SIZE,
|
|
};
|
|
|
|
VkWriteDescriptorSet desc_write = {
|
|
.sType = STYPE(WRITE_DESCRIPTOR_SET),
|
|
.dstSet = v_Renderer.handles.desc_sets[vDT_MESH],
|
|
.dstBinding = 0,
|
|
.descriptorCount = 1,
|
|
.dstArrayElement = index,
|
|
.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
|
|
.pBufferInfo = &buffer_info,
|
|
};
|
|
|
|
vkUpdateDescriptorSets(v_Renderer.handles.device, 1, &desc_write, 0, NULL);
|
|
|
|
return index;
|
|
}
|
|
|
|
static void
|
|
vDescIndexPush(vDescType type, u32 index)
|
|
{
|
|
vDescBindings *bindings = v_Renderer.desc_bindings + type;
|
|
|
|
Assert(bindings->free_count < DESC_MAX_BINDINGS-1, "vDescIndexPush failure: free_count equal to DESC_MAX_BINDINGS-1");
|
|
|
|
bindings->free[bindings->free_count] = index;
|
|
bindings->free_count += 1;
|
|
}
|
|
|
|
static u32
|
|
vDescIndexPop(vDescType type)
|
|
{
|
|
vDescBindings *bindings = v_Renderer.desc_bindings + type;
|
|
|
|
Assert(bindings->free_count > 0, "vDescIndexPop failure: free_count is 0");
|
|
|
|
bindings->free_count -= 1;
|
|
return bindings->free[bindings->free_count];
|
|
}
|
|
|
|
static rDescHandle
|
|
vDescHandleSearch(vDescType type, u32 asset_id)
|
|
{
|
|
rDescHandle asset_info = {
|
|
.asset_id = UINT32_MAX,
|
|
};
|
|
|
|
HashTable *table = &v_Renderer.desc_bindings[type].lookup_table;
|
|
|
|
KeyValuePair *kv_pair = HashTableSearchU64(table, asset_id);
|
|
if (kv_pair != NULL)
|
|
{
|
|
asset_info.asset_id = kv_pair->value_u64_split.upper;
|
|
asset_info.desc_index = kv_pair->value_u64_split.lower;
|
|
}
|
|
|
|
return asset_info;
|
|
}
|
|
|
|
static void
|
|
vDescHandlePush(vDescType type, rDescHandle handle)
|
|
{
|
|
HashTable *table = &v_Renderer.desc_bindings[type].lookup_table;
|
|
HashTablePushU64U64Split(table, handle.asset_id, handle.asset_id, handle.desc_index);
|
|
}
|
|
|
|
static rDescHandle
|
|
vDescHandlePop(vDescType type, u32 asset_id)
|
|
{
|
|
HashTable *table = &v_Renderer.desc_bindings[type].lookup_table;
|
|
|
|
U64Split split = HashTableDeleteU64U64Split(table, (u64)asset_id);
|
|
Assert(split.upper != UINT32_MAX, "vDescHandlePop failure: unable to find asset handle");
|
|
|
|
rDescHandle handle = {
|
|
.asset_id = split.upper,
|
|
.desc_index = split.lower,
|
|
};
|
|
|
|
return handle;
|
|
}
|
|
|
|
static void
|
|
vDescHandleDelete(vDescType type, u32 asset_id)
|
|
{
|
|
HashTable *table = &v_Renderer.desc_bindings[type].lookup_table;
|
|
HashTableDeleteU64(table, asset_id);
|
|
}
|
|
|
|
// ::Vulkan::Descriptors::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Buffers::Functions::Start::
|
|
|
|
static vTransfer *
|
|
vMeshTransferInit(Arena *arena, u32 asset_id, vMeshBuffer *mesh, rawptr bytes, u64 size)
|
|
{
|
|
vTransfer *transfer = MakeArray(arena, vTransfer, 1);
|
|
|
|
transfer->type = vTT_MESH;
|
|
transfer->data = bytes;
|
|
transfer->size = size;
|
|
transfer->mesh = mesh;
|
|
transfer->asset_id = asset_id;
|
|
|
|
return transfer;
|
|
}
|
|
|
|
static vTransfer *
|
|
vBufferTransferInit(Arena *arena, u32 asset_id, vBuffer *index, rawptr bytes, u64 size)
|
|
{
|
|
vTransfer *transfer = MakeArray(arena, vTransfer, 1);
|
|
|
|
transfer->type = vTT_BUFFER;
|
|
transfer->data = bytes;
|
|
transfer->asset_id = asset_id;
|
|
transfer->size = size;
|
|
transfer->buffer = index->buffer;
|
|
|
|
return transfer;
|
|
}
|
|
|
|
// Creates a VkBuffer + VMA allocation of `size` bytes configured by the
// rRenderBufferType flags in `type`:
//  - exactly one primary usage is chosen from the if/else-if chain
//    (VERTEX > INDEX > UNIFORM > STAGING > STORAGE by check order);
//  - rRBT_ADDR additionally enables shader device addresses;
//  - rRBT_HOST (or plain STAGING) selects host-visible/coherent memory,
//    everything else is device-local with TRANSFER_DST added so it can
//    be filled via staging copies.
// NOTE(review): if `type` combines two primary flags (e.g. VERTEX|INDEX)
// only the first match wins — confirm callers never combine them.
// Returns the VkResult of vmaCreateBuffer (logged on failure).
static VkResult
vBufferCreate(vBuffer* buf, rRenderBufferType type, u64 size)
{
    Assert(type != rRBT_NONE, "vBufferCreate: rRenderBufferType must not be rRBT_NONE");

    VkResult result;

    u32 gfx_queue = v_Renderer.state.vk.gfx_queue_idx;
    u32 tfer_queue = v_Renderer.state.vk.tfer_queue_idx;
    VmaAllocator alloc = v_Renderer.handles.vma_alloc;

    VkBufferCreateInfo buffer_info = {
        .sType = STYPE(BUFFER_CREATE_INFO),
        .size = size,
    };

    // MAPPED_BIT: VMA keeps host-visible allocations persistently mapped.
    VmaAllocationCreateInfo alloc_info = {
        .usage = VMA_MEMORY_USAGE_UNKNOWN,
        .flags = VMA_ALLOCATION_CREATE_MAPPED_BIT,
    };

    // Primary usage: first matching flag wins.
    if (BitEq(type, rRBT_VERTEX))
    {
        buffer_info.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    }
    else if (BitEq(type, rRBT_INDEX))
    {
        buffer_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    }
    else if (BitEq(type, rRBT_UNIFORM))
    {
        buffer_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    }
    else if (BitEq(type, rRBT_STAGING))
    {
        buffer_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    }
    else if (BitEq(type, rRBT_STORAGE))
    {
        buffer_info.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    }

    // Optional modifier: shader device address support.
    if (BitEq(type, rRBT_ADDR))
    {
        buffer_info.usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
    }

    // Memory placement: host-visible for HOST/STAGING, otherwise
    // device-local and copyable-into via transfer.
    if ((type & rRBT_HOST) == rRBT_HOST || type == rRBT_STAGING)
    {
        alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        alloc_info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    }
    else
    {
        buffer_info.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
        alloc_info.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    }

    // Distinct graphics/transfer families: share the buffer between both.
    if (tfer_queue != gfx_queue)
    {
        buffer_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
        buffer_info.queueFamilyIndexCount = 2;
        buffer_info.pQueueFamilyIndices = (u32[]){gfx_queue, tfer_queue};
    }

    VmaAllocationInfo vma_info;
    result = vmaCreateBuffer(alloc, &buffer_info, &alloc_info, &buf->buffer, &buf->alloc, &vma_info);
    if (result != VK_SUCCESS)
    {
        Printfln("vmaCreateBuffer failure: %s", vVkResultStr(result));
    }

    return result;
}
|
|
|
|
static rawptr
|
|
vMapBuffer(VmaAllocation alloc)
|
|
{
|
|
rawptr ptr;
|
|
vmaMapMemory(v_Renderer.handles.vma_alloc, alloc, &ptr);
|
|
return ptr;
|
|
}
|
|
|
|
static void
|
|
vModelPush(ModelAsset asset_id, vModelBuffers *buffer)
|
|
{
|
|
HashTablePushU64Rawptr(&v_Renderer.buffers.buffers, asset_id, buffer);
|
|
}
|
|
|
|
static vModelBuffers *
|
|
vModelPop(ModelAsset asset_id)
|
|
{
|
|
return (vModelBuffers *)HashTableDeleteU64Rawptr(&v_Renderer.buffers.buffers, asset_id);
|
|
}
|
|
|
|
static vModelBuffers *
|
|
vModelSearch(ModelAsset asset_id)
|
|
{
|
|
vModelBuffers *buffers = NULL;
|
|
|
|
KeyValuePair *pair = HashTableSearchU64(&v_Renderer.buffers.buffers, asset_id);
|
|
if (pair != NULL)
|
|
{
|
|
buffers = (vModelBuffers *)pair->value_rawptr;
|
|
}
|
|
|
|
return buffers;
|
|
}
|
|
|
|
// ::Vulkan::Buffers::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Init::Functions::Start::
|
|
|
|
static b32
|
|
vInitInstance()
|
|
{
|
|
b32 success = true;
|
|
|
|
Assert(vGlobalFunctionsInit(), "Unable to load vulkan functions");
|
|
|
|
VkResult result = vkCreateInstance(&g_Instance_Info, NULL, &v_Renderer.handles.inst);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
success = false;
|
|
Printfln("vkCreateInstance failure: %s", vVkResultStr(result));
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
static b32
|
|
vRenderDocInit()
|
|
{
|
|
pLibrary lib; pFunction fn; int result = 0;
|
|
|
|
b32 found = pLibraryLoad(RENDERDOC_LIB, &lib);
|
|
if (found)
|
|
{
|
|
found = pFunctionLoad("RENDERDOC_GetAPI", &lib, &fn);
|
|
if (found)
|
|
{
|
|
pRENDERDOC_GetAPI RENDERDOC_GetAPI = (pRENDERDOC_GetAPI)fn.fn;
|
|
result = RENDERDOC_GetAPI(eRENDERDOC_API_Version_1_1_2, (void **)&v_rdoc_api);
|
|
}
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
static void
|
|
vEnableDebug()
|
|
{
|
|
if (vValidationSupported())
|
|
{
|
|
Assert(vkCreateDebugUtilsMessengerEXT(v_Renderer.handles.inst, &g_Debug_Message_Info, NULL, &v_Renderer.handles.debug) == VK_SUCCESS,
|
|
"Unable to initialize debug messenger");
|
|
}
|
|
else
|
|
{
|
|
Printfln("Validation layers not supported, continuing without.");
|
|
}
|
|
}
|
|
|
|
static void
|
|
vArenasInit()
|
|
{
|
|
v_Renderer.mem.perm_arena = ArenaCreateDebug(MB(16), __LINE__);
|
|
|
|
for (u32 i = 0; i < FRAME_OVERLAP; i++)
|
|
{
|
|
v_Renderer.mem.frame_arenas[i] = ArenaCreateDebug(MB(8), i);
|
|
}
|
|
}
|
|
|
|
static b32
|
|
vVmaAllocatorInit()
|
|
{
|
|
VmaVulkanFunctions vk_functions = {
|
|
.vkGetInstanceProcAddr = vkGetInstanceProcAddr,
|
|
.vkGetDeviceProcAddr = vkGetDeviceProcAddr,
|
|
};
|
|
g_VMA_Create_Info.pVulkanFunctions = &vk_functions;
|
|
g_VMA_Create_Info.physicalDevice = v_Renderer.handles.phys_device;
|
|
g_VMA_Create_Info.device = v_Renderer.handles.device;
|
|
g_VMA_Create_Info.instance = v_Renderer.handles.inst;
|
|
|
|
VkResult result = vmaCreateAllocator(&g_VMA_Create_Info, &v_Renderer.handles.vma_alloc);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printf("vmaCreateAllocator failure: %d", result);
|
|
}
|
|
|
|
return result == VK_SUCCESS;
|
|
}
|
|
|
|
static b32
|
|
vQueueCheckSurfaceSupport(i32 index, VkPhysicalDevice device, VkSurfaceKHR surface)
|
|
{
|
|
b32 surface_supported;
|
|
vkGetPhysicalDeviceSurfaceSupportKHR(device, (u32)index, surface, &surface_supported);
|
|
return surface_supported;
|
|
}
|
|
|
|
static vDeviceQueues
|
|
vQueueCheckSupport(VkPhysicalDevice device, VkSurfaceKHR surface)
|
|
{
|
|
vDeviceQueues queues = { .graphics = -1, .transfer = -1 };
|
|
Arena *arena = vFrameArena();
|
|
|
|
u32 queue_count;
|
|
vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_count, NULL);
|
|
VkQueueFamilyProperties *families = MakeArray(arena, VkQueueFamilyProperties, queue_count);
|
|
vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_count, families);
|
|
|
|
if (queue_count == 1 &&
|
|
BitEq(families[0].queueFlags, VK_QUEUE_TRANSFER_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT) &&
|
|
families[0].queueCount == 1)
|
|
{
|
|
queues.graphics = queues.transfer = 0;
|
|
queues.single_queue = true;
|
|
}
|
|
else
|
|
{
|
|
b8 sparse_binding = false;
|
|
b8 transfer_only = false;
|
|
for (i32 i = 0; i < queue_count; i++)
|
|
{
|
|
if (queues.graphics < 0 && vQueueCheckSurfaceSupport(i, device, surface) && BitEq(families[i].queueFlags, VK_QUEUE_GRAPHICS_BIT))
|
|
{
|
|
queues.graphics = i;
|
|
continue;
|
|
}
|
|
|
|
if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT) && !BitEq(families[i].queueFlags, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT))
|
|
{
|
|
sparse_binding = true;
|
|
transfer_only = true;
|
|
queues.transfer = i;
|
|
continue;
|
|
}
|
|
|
|
if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT | VK_QUEUE_SPARSE_BINDING_BIT) && !(sparse_binding && transfer_only))
|
|
{
|
|
sparse_binding = true;
|
|
queues.transfer = i;
|
|
continue;
|
|
}
|
|
|
|
if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT) && !BitEq(families[i].queueFlags, VK_QUEUE_COMPUTE_BIT) && !sparse_binding)
|
|
{
|
|
transfer_only = true;
|
|
queues.transfer = i;
|
|
continue;
|
|
}
|
|
|
|
if (BitEq(families[i].queueFlags, VK_QUEUE_TRANSFER_BIT) && !sparse_binding && !transfer_only)
|
|
queues.transfer = i;
|
|
}
|
|
|
|
if (queues.transfer < 0)
|
|
queues.transfer = queues.graphics;
|
|
}
|
|
|
|
return queues;
|
|
}
|
|
|
|
// Checks whether `device` meets the renderer's requirements: Vulkan API
// minor version >= 3, every extension in g_Device_Extensions available,
// and at least one surface format and present mode for `surface`.
// Writes whether the GPU is discrete to *discrete (only assigned on the
// extension-match path). Returns true when all checks pass.
static b32
vDeviceCheckPropertiesSupport(VkPhysicalDevice device, VkSurfaceKHR surface, b32 *discrete)
{
    b32 success = false;
    Arena *arena = vFrameArena();

    VkPhysicalDeviceProperties properties = {0};
    vkGetPhysicalDeviceProperties(device, &properties);

    // Renderer relies on 1.3 features (sync2, dynamic rendering).
    if (VK_API_VERSION_MINOR(properties.apiVersion) >= 3)
    {
        u32 ext_count;
        vkEnumerateDeviceExtensionProperties(device, NULL, &ext_count, NULL);
        VkExtensionProperties *ext_properties = ArenaAlloc(arena, sizeof(VkExtensionProperties) * ext_count);
        vkEnumerateDeviceExtensionProperties(device, NULL, &ext_count, ext_properties);

        // Count how many required extensions the device advertises.
        i32 matched = 0;
        for (u32 i = 0; i < ext_count; i++) {
            for (i32 j = 0; j < Len(g_Device_Extensions); j++) {
                if (StrEq(ext_properties[i].extensionName, g_Device_Extensions[j])) {
                    matched++;
                    break;
                }
            }
        }

        if (matched == Len(g_Device_Extensions))
        {
            // Only the counts matter here; the actual formats/modes are
            // queried again during swapchain creation.
            u32 fmt_count, present_count;
            vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &fmt_count, NULL);
            vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, NULL);

            *discrete = properties.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;

            success = fmt_count && present_count;
        }
    }

    return success;
}
|
|
|
|
static b32
|
|
vDeviceCheckFeatureSupport(VkPhysicalDevice device)
|
|
{
|
|
VkPhysicalDeviceFeatures2 features2 = { .sType = STYPE(PHYSICAL_DEVICE_FEATURES_2) };
|
|
|
|
VkPhysicalDeviceVulkan12Features features_12 = { .sType = STYPE(PHYSICAL_DEVICE_VULKAN_1_2_FEATURES) };
|
|
VkPhysicalDeviceVulkan13Features features_13 = { .sType = STYPE(PHYSICAL_DEVICE_VULKAN_1_3_FEATURES) };
|
|
|
|
features2.pNext = &features_12;
|
|
vkGetPhysicalDeviceFeatures2(device, &features2);
|
|
features2.pNext = &features_13;
|
|
vkGetPhysicalDeviceFeatures2(device, &features2);
|
|
|
|
VkPhysicalDeviceFeatures features = features2.features;
|
|
b32 result = true;
|
|
|
|
result &= (b32)features.shaderUniformBufferArrayDynamicIndexing;
|
|
result &= (b32)features.shaderSampledImageArrayDynamicIndexing;
|
|
result &= (b32)features.shaderStorageBufferArrayDynamicIndexing;
|
|
result &= (b32)features.shaderStorageImageArrayDynamicIndexing;
|
|
result &= (b32)features.samplerAnisotropy;
|
|
|
|
result &= (b32)features_12.descriptorIndexing;
|
|
result &= (b32)features_12.bufferDeviceAddress;
|
|
result &= (b32)features_12.descriptorBindingUniformBufferUpdateAfterBind;
|
|
result &= (b32)features_12.descriptorBindingSampledImageUpdateAfterBind;
|
|
result &= (b32)features_12.descriptorBindingStorageImageUpdateAfterBind;
|
|
result &= (b32)features_12.descriptorBindingStorageBufferUpdateAfterBind;
|
|
result &= (b32)features_12.descriptorBindingPartiallyBound;
|
|
result &= (b32)features_12.runtimeDescriptorArray;
|
|
result &= (b32)features_12.shaderSampledImageArrayNonUniformIndexing;
|
|
result &= (b32)features_12.shaderUniformBufferArrayNonUniformIndexing;
|
|
|
|
result &= (b32)features_13.synchronization2;
|
|
result &= (b32)features_13.dynamicRendering;
|
|
|
|
return result;
|
|
}
|
|
|
|
// Picks a physical device (preferring discrete GPUs with a dedicated
// transfer queue family), creates the logical device with one or two
// queue create infos, resolves device-level function pointers, and
// stores the resulting handles/indices in v_Renderer. Returns true on
// success.
// NOTE(review): when !single_queue and transfer == graphics, two
// VkDeviceQueueCreateInfo entries share the same queueFamilyIndex —
// the Vulkan spec requires unique family indices in pQueueCreateInfos
// (one info with queueCount = 2 is the conforming form). Confirm
// against validation layers.
static b32
vDeviceInit()
{
    VkInstance inst = v_Renderer.handles.inst;
    VkSurfaceKHR surface = v_Renderer.handles.surface;
    Arena *arena = vFrameArena();

    u32 device_count;
    vkEnumeratePhysicalDevices(inst, &device_count, NULL);
    VkPhysicalDevice *devices = ArenaAlloc(arena, sizeof(VkPhysicalDevice) * device_count);
    vkEnumeratePhysicalDevices(inst, &device_count, devices);

    // Scan all devices; keep the best candidate seen so far. A discrete
    // GPU with a distinct transfer family ends the search early.
    b32 discrete_device = false;
    vDeviceQueues queues = {0};
    VkPhysicalDevice phys_device = NULL;
    for (u32 i = 0; i < device_count; i++) {
        vDeviceQueues current_queues = vQueueCheckSupport(devices[i], surface);
        b32 discrete = false;

        if (current_queues.graphics < 0)
            continue;
        if (!vDeviceCheckPropertiesSupport(devices[i], surface, &discrete))
            continue;
        // Never downgrade from a discrete GPU to an integrated one.
        if (discrete_device && !discrete)
            continue;
        if (!vDeviceCheckFeatureSupport(devices[i]))
            continue;

        discrete_device = discrete;
        queues = current_queues;
        phys_device = devices[i];

        if (discrete_device && queues.graphics != queues.transfer)
            break;
    }

    b32 success = false;
    if (phys_device != NULL)
    {
        VkDeviceQueueCreateInfo queue_info[2] = {0};
        f32 priority = 1.0f;
        u32 count = 1;
        // Which queue index within the transfer family to retrieve:
        // index 1 when transfer shares the graphics family.
        u32 transfer_queue_index = 0;

        queue_info[0].sType = STYPE(DEVICE_QUEUE_CREATE_INFO);
        queue_info[0].queueFamilyIndex = queues.graphics;
        queue_info[0].queueCount = 1;
        queue_info[0].pQueuePriorities = &priority;
        queue_info[0].flags = 0;

        if (!queues.single_queue) {
            queue_info[1].sType = STYPE(DEVICE_QUEUE_CREATE_INFO);
            queue_info[1].queueFamilyIndex = queues.transfer;
            queue_info[1].queueCount = 1;
            queue_info[1].pQueuePriorities = &priority;
            queue_info[1].flags = 0;

            if (queues.transfer == queues.graphics)
                transfer_queue_index = 1;

            count++;
        }

        g_Device_Info.queueCreateInfoCount = count;
        g_Device_Info.pQueueCreateInfos = &queue_info[0];

        VkResult result = vkCreateDevice(phys_device, &g_Device_Info, NULL, &v_Renderer.handles.device);
        if (result != VK_SUCCESS) {
            Printf("vkCreateDevice failure: %d", result);
        }
        else
        {
            Assert(vDeviceFunctionsInit(), "Failed to initialize device functions");

            vkGetDeviceQueue(
                v_Renderer.handles.device,
                queues.graphics,
                0,
                &queues.graphics_queue);

            vkGetDeviceQueue(
                v_Renderer.handles.device,
                queues.transfer,
                transfer_queue_index,
                &queues.transfer_queue);

            v_Renderer.handles.phys_device = phys_device;

            v_Renderer.handles.gfx_queue = queues.graphics_queue;
            v_Renderer.handles.tfer_queue = queues.transfer_queue;

            v_Renderer.state.vk.gfx_queue_idx = queues.graphics;
            v_Renderer.state.vk.tfer_queue_idx = queues.transfer;

            success = true;
        }
    }

    return success;
}
|
|
|
|
static b32
|
|
vGlobalFunctionsInit()
|
|
{
|
|
b32 result = vLibraryLoad();
|
|
if (result)
|
|
{
|
|
INIT_FN(vkGetInstanceProcAddr);
|
|
INIT_FN(vkEnumerateInstanceLayerProperties);
|
|
INIT_FN(vkCreateInstance);
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
// Resolves instance-level Vulkan entry points through INIT_INST_FN
// (which captures `instance`). Always returns true — presumably
// INIT_INST_FN asserts or logs internally on failure; confirm against
// the macro definition.
static b32
vInstanceFunctionsInit()
{
    VkInstance instance = v_Renderer.handles.inst;

    // Platform-specific window-surface constructor.
#ifdef __linux__
    {
        INIT_INST_FN(vkCreateXcbSurfaceKHR);
    }
#elif _WIN32
    {
        INIT_INST_FN(vkCreateWin32SurfaceKHR);
    }
#endif

    // Debug-utils messenger is only loaded in debug builds.
#ifdef BUILD_DEBUG
    {
        INIT_INST_FN(vkCreateDebugUtilsMessengerEXT);
        INIT_INST_FN(vkDestroyDebugUtilsMessengerEXT);
    }
#endif

    INIT_INST_FN(vkEnumeratePhysicalDevices);
    INIT_INST_FN(vkGetPhysicalDeviceQueueFamilyProperties);
    INIT_INST_FN(vkGetPhysicalDeviceSurfaceSupportKHR);
    INIT_INST_FN(vkCreateDevice);
    INIT_INST_FN(vkGetPhysicalDeviceProperties);
    INIT_INST_FN(vkGetPhysicalDeviceFeatures2);
    INIT_INST_FN(vkGetPhysicalDeviceSurfaceFormatsKHR);
    INIT_INST_FN(vkGetPhysicalDeviceSurfacePresentModesKHR);
    INIT_INST_FN(vkEnumerateDeviceExtensionProperties);
    INIT_INST_FN(vkGetDeviceProcAddr);
    INIT_INST_FN(vkDestroyInstance);
    INIT_INST_FN(vkDestroySurfaceKHR);
    INIT_INST_FN(vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
    INIT_INST_FN(vkGetPhysicalDeviceImageFormatProperties);

    return true;
}
|
|
|
|
// Resolves device-level Vulkan entry points via the INIT_DEV_FN macro,
// which reads the `device` local (v_Renderer.handles.device). Called
// right after vkCreateDevice succeeds (see the device-creation path).
// Always returns true; presumably the macro reports lookup failures —
// verify before relying on the return value.
static b32
vDeviceFunctionsInit()
{
    VkDevice device = v_Renderer.handles.device;

    // Object creation / query
    INIT_DEV_FN(vkCreateSwapchainKHR);
    INIT_DEV_FN(vkCreateImage);
    INIT_DEV_FN(vkCreateImageView);
    INIT_DEV_FN(vkGetSwapchainImagesKHR);
    INIT_DEV_FN(vkGetDeviceQueue);
    INIT_DEV_FN(vkCreateSemaphore);
    INIT_DEV_FN(vkAllocateCommandBuffers);
    INIT_DEV_FN(vkCreateCommandPool);
    INIT_DEV_FN(vkCreateFence);
    INIT_DEV_FN(vkCreateDescriptorPool);
    INIT_DEV_FN(vkCreateDescriptorSetLayout);
    INIT_DEV_FN(vkAllocateDescriptorSets);
    INIT_DEV_FN(vkCreatePipelineLayout);
    INIT_DEV_FN(vkResetDescriptorPool);
    INIT_DEV_FN(vkCreateShaderModule);
    INIT_DEV_FN(vkCreateGraphicsPipelines);
    INIT_DEV_FN(vkCreateComputePipelines);
    INIT_DEV_FN(vkUpdateDescriptorSets);
    // Object destruction
    INIT_DEV_FN(vkDestroyDevice);
    INIT_DEV_FN(vkDestroyDescriptorPool);
    INIT_DEV_FN(vkDestroySwapchainKHR);
    INIT_DEV_FN(vkDestroyImage);
    INIT_DEV_FN(vkDestroyImageView);
    INIT_DEV_FN(vkDestroyCommandPool);
    INIT_DEV_FN(vkDestroySemaphore);
    INIT_DEV_FN(vkDestroyFence);
    INIT_DEV_FN(vkDestroyPipelineLayout);
    INIT_DEV_FN(vkDestroyPipeline);
    // Synchronization / command recording / submission
    INIT_DEV_FN(vkWaitForFences);
    INIT_DEV_FN(vkBeginCommandBuffer);
    INIT_DEV_FN(vkEndCommandBuffer);
    INIT_DEV_FN(vkAcquireNextImageKHR);
    INIT_DEV_FN(vkCmdBindPipeline);
    INIT_DEV_FN(vkCmdBindDescriptorSets);
    INIT_DEV_FN(vkCmdDispatch);
    INIT_DEV_FN(vkCmdBeginRendering);
    INIT_DEV_FN(vkCmdEndRendering);
    INIT_DEV_FN(vkCmdSetViewport);
    INIT_DEV_FN(vkCmdSetScissor);
    INIT_DEV_FN(vkCmdPushConstants);
    INIT_DEV_FN(vkCmdBindIndexBuffer);
    INIT_DEV_FN(vkCmdBindVertexBuffers);
    INIT_DEV_FN(vkCmdDrawIndexed);
    INIT_DEV_FN(vkCmdBlitImage2);
    INIT_DEV_FN(vkCmdPipelineBarrier2);
    INIT_DEV_FN(vkCmdCopyBufferToImage);
    INIT_DEV_FN(vkCmdCopyBuffer);
    INIT_DEV_FN(vkQueueSubmit2);
    INIT_DEV_FN(vkResetFences);
    INIT_DEV_FN(vkResetCommandBuffer);
    INIT_DEV_FN(vkFreeCommandBuffers);
    INIT_DEV_FN(vkDestroyDescriptorSetLayout);
    INIT_DEV_FN(vkDestroyShaderModule);
    INIT_DEV_FN(vkQueuePresentKHR);
    INIT_DEV_FN(vkCmdDraw);
    INIT_DEV_FN(vkDeviceWaitIdle);
    INIT_DEV_FN(vkCmdClearColorImage);
    INIT_DEV_FN(vkCreateSampler);
    INIT_DEV_FN(vkDestroySampler);
    INIT_DEV_FN(vkGetBufferDeviceAddress);

    return true;
}
|
|
|
|
// TODO(MA): implement other platforms
|
|
#ifdef __linux__
|
|
static b32
|
|
vSurfaceInit()
|
|
{
|
|
pPlatformWindow *window = pWindowGet();
|
|
VkXcbSurfaceCreateInfoKHR surface_info = {
|
|
.sType = STYPE(XCB_SURFACE_CREATE_INFO_KHR),
|
|
.connection = window->connection,
|
|
.window = window->window
|
|
};
|
|
|
|
VkResult result = vkCreateXcbSurfaceKHR(v_Renderer.handles.inst, &surface_info, NULL, &v_Renderer.handles.surface);
|
|
if (result != VK_SUCCESS) {
|
|
Printf("Unable to create surface: %d", result);
|
|
}
|
|
|
|
return result == VK_SUCCESS;
|
|
}
|
|
#elif _WIN32
|
|
static b32
|
|
vSurfaceInit()
|
|
{
|
|
b32 success = true;
|
|
|
|
pPlatformWindow *window = pWindowGet();
|
|
VkWin32SurfaceCreateInfoKHR surface_info = {
|
|
.sType = STYPE(WIN32_SURFACE_CREATE_INFO_KHR),
|
|
.hinstance = window->instance,
|
|
.hwnd = window->handle,
|
|
};
|
|
|
|
VkResult result = vkCreateWin32SurfaceKHR(v_Renderer.handles.inst, &surface_info, NULL, &v_Renderer.handles.surface);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("Unable to create surface: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
return success;
|
|
}
|
|
#endif
|
|
|
|
static b32
|
|
vLibraryLoad()
|
|
{
|
|
pLibrary *lib = &v_Renderer.handles.lib;
|
|
b32 lib_found; pFunction fn;
|
|
|
|
for (i32 i = 0; i < Len(vulkan_libs); i++)
|
|
{
|
|
lib_found = pLibraryLoad(vulkan_libs[i], lib);
|
|
if (lib_found)
|
|
{
|
|
lib_found = pFunctionLoad("vkGetInstanceProcAddr", lib, &fn);
|
|
vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)fn.fn;
|
|
break;
|
|
}
|
|
}
|
|
|
|
return lib_found;
|
|
}
|
|
|
|
static b32
|
|
vFrameStructuresInit()
|
|
{
|
|
b32 success = true;
|
|
u32 img_count = v_Renderer.images.sc.length;
|
|
|
|
g_Pool_Create_Info.queueFamilyIndex = v_Renderer.state.vk.gfx_queue_idx;
|
|
|
|
for (u32 i = 0; i < FRAME_OVERLAP; i++)
|
|
{
|
|
VkResult result;
|
|
VkDevice device = v_Renderer.handles.device;
|
|
vFrameHandles *handles = &v_Renderer.frame_handles[i];
|
|
|
|
result = vkCreateCommandPool(device, &g_Pool_Create_Info, NULL, &handles->pool);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
g_Command_Buffer_Info.commandPool = handles->pool;
|
|
|
|
result = vkAllocateCommandBuffers(device, &g_Command_Buffer_Info, &handles->buffer);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
result = vkCreateFence(device, &g_Fence_Create_Info, NULL, &handles->r_fence);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
result = vkCreateSemaphore(device, &g_Semaphore_Create_info, NULL, &handles->r_sem);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
result = vkCreateSemaphore(device, &g_Semaphore_Create_info, NULL, &handles->sc_sem);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
//renderer.vk.frame.buffer_destroy_queues[i] = ArenaAlloc(v_Renderer.mem.perm_arena, sizeof(rRenderBuffer) * 64);
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
static b32
|
|
vImmediateStructuresInit()
|
|
{
|
|
b32 success = true;
|
|
VkResult result;
|
|
VkDevice device = v_Renderer.handles.device;
|
|
vImmHandles *imm = &v_Renderer.imm;
|
|
g_Pool_Create_Info.queueFamilyIndex = v_Renderer.state.vk.tfer_queue_idx;
|
|
|
|
result = vkCreateCommandPool(device, &g_Pool_Create_Info, NULL, &imm->pool);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
g_Command_Buffer_Info.commandPool = imm->pool;
|
|
|
|
result = vkAllocateCommandBuffers(device, &g_Command_Buffer_Info, &imm->buffer);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
result = vkCreateFence(device, &g_Fence_Create_Info, NULL, &imm->fence);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
return success;
|
|
}
|
|
|
|
static void
|
|
vUploadQueuesInit()
|
|
{
|
|
v_Renderer.upload.transfers = MakeArray(v_Renderer.mem.perm_arena, vTransfer *, 256);
|
|
}
|
|
|
|
static b32
|
|
vSwapchainInit()
|
|
{
|
|
b32 success = true;
|
|
VkPhysicalDevice phys_device = v_Renderer.handles.phys_device;
|
|
VkDevice device = v_Renderer.handles.device;
|
|
VkSurfaceKHR surface = v_Renderer.handles.surface;
|
|
VkPresentModeKHR present_mode = v_Renderer.state.swapchain.present_mode;
|
|
VkFormat format = v_Renderer.state.swapchain.format;
|
|
VkColorSpaceKHR color_space = v_Renderer.state.swapchain.color_space;
|
|
Arena *arena = vFrameArena();
|
|
|
|
VkSurfaceCapabilitiesKHR capabilities;
|
|
vkGetPhysicalDeviceSurfaceCapabilitiesKHR(phys_device, surface, &capabilities);
|
|
|
|
// Maybe reconsider handling window sizing within here and only handle it from events themselves
|
|
// causes issues when the window size is out of sync with the current swapchain
|
|
VkExtent2D extent;
|
|
u32 width = v_Renderer.state.swapchain.extent.width;
|
|
u32 height = v_Renderer.state.swapchain.extent.height;
|
|
|
|
extent.width = Clampu32((u32)width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width);
|
|
extent.height = Clampu32((u32)height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height);
|
|
|
|
if (present_mode == INT_MAX || format == INT_MAX || color_space == INT_MAX)
|
|
{
|
|
u32 format_count;
|
|
vkGetPhysicalDeviceSurfaceFormatsKHR(phys_device, surface, &format_count, NULL);
|
|
VkSurfaceFormatKHR *formats = ArenaAlloc(arena, sizeof(VkSurfaceFormatKHR) * format_count);
|
|
vkGetPhysicalDeviceSurfaceFormatsKHR(phys_device, surface, &format_count, formats);
|
|
|
|
format = formats[0].format;
|
|
color_space = formats[0].colorSpace;
|
|
|
|
u32 present_count;
|
|
vkGetPhysicalDeviceSurfacePresentModesKHR(phys_device, surface, &present_count, NULL);
|
|
VkPresentModeKHR *present_modes = ArenaAlloc(arena, sizeof(VkSurfaceFormatKHR) * present_count);
|
|
vkGetPhysicalDeviceSurfacePresentModesKHR(phys_device, surface, &present_count, present_modes);
|
|
|
|
for (u32 i = 0; i < present_count; i++)
|
|
{
|
|
if (present_modes[i] == VK_PRESENT_MODE_MAILBOX_KHR)
|
|
{
|
|
present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
|
|
break;
|
|
}
|
|
}
|
|
|
|
if (present_mode != VK_PRESENT_MODE_MAILBOX_KHR)
|
|
present_mode = VK_PRESENT_MODE_FIFO_KHR;
|
|
}
|
|
|
|
g_Swap_Info.minImageCount = capabilities.minImageCount + 1;
|
|
g_Swap_Info.surface = surface;
|
|
g_Swap_Info.imageFormat = format;
|
|
g_Swap_Info.imageColorSpace = color_space;
|
|
g_Swap_Info.imageExtent = extent;
|
|
g_Swap_Info.preTransform = capabilities.currentTransform;
|
|
g_Swap_Info.presentMode = present_mode;
|
|
|
|
VkResult result;
|
|
result = vkCreateSwapchainKHR(device, &g_Swap_Info, NULL, &v_Renderer.handles.swapchain);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
|
|
u32 image_count;
|
|
vkGetSwapchainImagesKHR(device, v_Renderer.handles.swapchain, &image_count, NULL);
|
|
VkImage *sc_images = ArenaAlloc(v_Renderer.mem.perm_arena, sizeof(VkImage) * image_count);
|
|
VkImageView *sc_views = ArenaAlloc(v_Renderer.mem.perm_arena, sizeof(VkImageView) * image_count);
|
|
vkGetSwapchainImagesKHR(device, v_Renderer.handles.swapchain, &image_count, sc_images);
|
|
|
|
InitArrayType(v_Renderer.images.sc, v_Renderer.mem.perm_arena, vImageView, image_count);
|
|
|
|
for (u32 i = 0; i < image_count; i++)
|
|
{
|
|
v_Renderer.images.sc.data[i].image.image = sc_images[i];
|
|
g_Swap_View_Info.image = sc_images[i];
|
|
g_Swap_View_Info.format = format;
|
|
|
|
result = vkCreateImageView(device, &g_Swap_View_Info, NULL, &v_Renderer.images.sc.data[i].view);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
}
|
|
|
|
v_Renderer.images.sc.length = image_count;
|
|
|
|
v_Renderer.state.swapchain.format = format;
|
|
v_Renderer.state.swapchain.color_space = color_space;
|
|
v_Renderer.state.swapchain.present_mode = present_mode;
|
|
v_Renderer.state.swapchain.extent.width = extent.width;
|
|
v_Renderer.state.swapchain.extent.height = extent.height;
|
|
v_Renderer.state.swapchain.extent.depth = 1;
|
|
|
|
return success;
|
|
}
|
|
|
|
// Creates the off-screen color ("draw") image and the depth image used
// as render targets, plus a view for each. Both are device-local VMA
// allocations sized to the current swapchain extent; the color format is
// selected by vImageFormatGet(). Returns false if any creation fails
// (all are still attempted).
static b32
vDrawImagesInit()
{
    b32 success = true;
    VkResult result;

    VkFormat image_format = vImageFormatGet();
    VkExtent3D extent = v_Renderer.state.swapchain.extent;
    VkDevice device = v_Renderer.handles.device;

    // GPU-only, device-local memory for both render targets.
    VmaAllocationCreateInfo alloc_create_info = {
        .usage = VMA_MEMORY_USAGE_GPU_ONLY,
        .requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
    };

    // Draw vImage — patch the shared global info struct before use.
    g_Draw_Image_Info.format = image_format;
    g_Draw_Image_Info.extent = extent;

    result = vmaCreateImage(v_Renderer.handles.vma_alloc, &g_Draw_Image_Info,
        &alloc_create_info, &v_Renderer.images.draw.image.image, &v_Renderer.images.draw.image.alloc, NULL);
    if (result != VK_SUCCESS)
        success = false;

    // Draw vImage View
    g_Draw_View_Info.image = v_Renderer.images.draw.image.image;
    g_Draw_View_Info.format = image_format;

    result = vkCreateImageView(device, &g_Draw_View_Info, NULL, &v_Renderer.images.draw.view);
    if (result != VK_SUCCESS)
        success = false;

    // Depth vImage — format comes pre-set in g_Depth_Image_Info.
    g_Depth_Image_Info.extent = extent;

    result = vmaCreateImage(v_Renderer.handles.vma_alloc, &g_Depth_Image_Info,
        &alloc_create_info, &v_Renderer.images.depth.image.image, &v_Renderer.images.depth.image.alloc, NULL);
    if (result != VK_SUCCESS)
        success = false;

    // Depth vImage View
    g_Depth_View_Info.image = v_Renderer.images.depth.image.image;
    result = vkCreateImageView(device, &g_Depth_View_Info, NULL, &v_Renderer.images.depth.view);
    if (result != VK_SUCCESS)
        success = false;

    // Setting values. Fresh images start in UNDEFINED layout; they are
    // transitioned at first use.
    v_Renderer.state.swapchain.extent = extent;
    v_Renderer.images.depth.image.format = g_Depth_Image_Info.format;
    v_Renderer.images.depth.image.layout = VK_IMAGE_LAYOUT_UNDEFINED;
    v_Renderer.images.draw.image.format = g_Draw_Image_Info.format;
    v_Renderer.images.draw.image.layout = VK_IMAGE_LAYOUT_UNDEFINED;

    return success;
}
|
|
|
|
static VkFormat
|
|
vImageFormatGet()
|
|
{
|
|
VkPhysicalDevice phys_device = v_Renderer.handles.phys_device;
|
|
VkImageType image_type = VK_IMAGE_TYPE_2D;
|
|
VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;
|
|
VkImageUsageFlags usage_flags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
|
|
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
|
|
VK_IMAGE_USAGE_STORAGE_BIT |
|
|
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
|
|
|
|
VkFormat format = 0;
|
|
for (i32 i = 0; i < Len(VK_IMAGE_FORMATS); i++)
|
|
{
|
|
VkImageFormatProperties properties;
|
|
VkResult result;
|
|
result = vkGetPhysicalDeviceImageFormatProperties(phys_device, VK_IMAGE_FORMATS[i], image_type,
|
|
tiling, usage_flags, 0, &properties);
|
|
if (result == VK_ERROR_FORMAT_NOT_SUPPORTED)
|
|
continue;
|
|
|
|
if (result == VK_SUCCESS)
|
|
{
|
|
format = VK_IMAGE_FORMATS[i];
|
|
break;
|
|
}
|
|
}
|
|
|
|
Assert(format != 0, "[Error] unable to find appropriate image format");
|
|
|
|
return format;
|
|
}
|
|
|
|
// Sets up the renderer's descriptor machinery:
//   1. one descriptor pool,
//   2. the "shared" set layout plus one bindless layout per descriptor
//      type (vDT_SAMPLED_IMAGE..vDT_MAX),
//   3. one descriptor set per layout and the single pipeline layout,
//   4. per-type free-slot lists + lookup tables for bindless allocation,
//   5. a nearest-filter sampler written into the shared set (binding 3).
// Returns false if any Vulkan call fails; steps 4-5 only run on success.
static b32
vDescriptorsInit()
{
    b32 success = true;

    VkDevice device = v_Renderer.handles.device;
    VkResult result;
    vDescBindings *bindings = v_Renderer.desc_bindings;

    result = vkCreateDescriptorPool(device, &g_Desc_Pool_Info, NULL, &v_Renderer.handles.desc_pool);
    if (result != VK_SUCCESS)
        success = false;

    result = vkCreateDescriptorSetLayout(device, &g_Shared_Desc_Layout_Info, NULL, &v_Renderer.handles.desc_layouts[vDT_SHARED]);
    if (result != VK_SUCCESS)
        success = false;

    // One bindless layout per descriptor type; the shared global binding
    // struct is patched with the mapped VkDescriptorType before each call.
    for (u32 i = vDT_SAMPLED_IMAGE; i < vDT_MAX; i++)
    {
        g_Bindless_Desc_Binding.descriptorType = g_Desc_Type_Map[i];
        result = vkCreateDescriptorSetLayout(device, &g_Bindless_Desc_Info, NULL, &v_Renderer.handles.desc_layouts[i]);
        if (result != VK_SUCCESS)
            success = false;
    }

    g_Descriptor_Alloc_Info.descriptorPool = v_Renderer.handles.desc_pool;
    g_Descriptor_Alloc_Info.pSetLayouts = v_Renderer.handles.desc_layouts;

    result = vkAllocateDescriptorSets(device, &g_Descriptor_Alloc_Info, v_Renderer.handles.desc_sets);
    if (result != VK_SUCCESS)
        success = false;

    // Single pipeline layout covering all descriptor set layouts.
    g_Pipeline_Layout_Info.setLayoutCount = vDT_MAX;
    g_Pipeline_Layout_Info.pSetLayouts = v_Renderer.handles.desc_layouts;

    result = vkCreatePipelineLayout(device, &g_Pipeline_Layout_Info, NULL, &v_Renderer.handles.pipeline_layout);
    if (result != VK_SUCCESS)
        success = false;

    if (success)
    {
        // Build a free-list per descriptor type: slots are pushed in
        // reverse so index 0 is popped last (free[j] = count ascending
        // from the back, i.e. slot DESC_MAX_BINDINGS-1 holds value 0).
        for (u32 i = 0; i < vDT_MAX; i++)
        {
            // FREE MIGHT BE NULL
            bindings[i].free = ArenaAlloc(v_Renderer.mem.perm_arena, sizeof(u32) * DESC_MAX_BINDINGS);

            HashTableInit(&bindings[i].lookup_table, 6);

            u32 free_count = 0;
            for (i32 j = DESC_MAX_BINDINGS-1; j >= 0; j--)
            {
                bindings[i].free[j] = free_count++;
            }

            bindings[i].free_count = free_count;
        }
    }

    if (success)
    {
        // Device properties are queried for the max supported anisotropy.
        VkPhysicalDeviceProperties properties;
        vkGetPhysicalDeviceProperties(v_Renderer.handles.phys_device, &properties);

        VkSamplerCreateInfo sampler_create_info = {
            .sType = STYPE(SAMPLER_CREATE_INFO),
            .magFilter = VK_FILTER_NEAREST,
            .minFilter = VK_FILTER_NEAREST,
            .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .anisotropyEnable = VK_TRUE,
            .maxAnisotropy = properties.limits.maxSamplerAnisotropy,
            .borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
            .compareOp = VK_COMPARE_OP_ALWAYS,
            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR,
        };

        result = vkCreateSampler(v_Renderer.handles.device, &sampler_create_info, NULL, &v_Renderer.handles.nearest_sampler);
        if (result != VK_SUCCESS)
        {
            Printfln("vDescriptorsInit failure: vkCreateSampler failed: %s", vVkResultStr(result));
            success = false;
        }
    }

    if (success)
    {
        VkDescriptorImageInfo sampler_info = {
            .sampler = v_Renderer.handles.nearest_sampler,
        };

        // Publish the sampler at binding 3 of the shared descriptor set.
        VkWriteDescriptorSet desc_write = {
            .sType = STYPE(WRITE_DESCRIPTOR_SET),
            .dstSet = v_Renderer.handles.desc_sets[vDT_SHARED],
            .dstBinding = 3,
            .descriptorCount = 1,
            .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER,
            .pImageInfo = &sampler_info,
        };

        vkUpdateDescriptorSets(v_Renderer.handles.device, 1, &desc_write, 0, NULL);
    }

    return success;
}
|
|
|
|
static b32
|
|
vPipelinesInit()
|
|
{
|
|
b32 success = true;
|
|
VkResult result;
|
|
VkDevice device = v_Renderer.handles.device;
|
|
|
|
/*
|
|
* SHARED
|
|
*/
|
|
|
|
VkPipelineRenderingCreateInfo pipeline_render_info = {
|
|
.sType = STYPE(PIPELINE_RENDERING_CREATE_INFO),
|
|
.colorAttachmentCount = 1,
|
|
.pColorAttachmentFormats = &v_Renderer.images.draw.image.format,
|
|
.depthAttachmentFormat = v_Renderer.images.depth.image.format,
|
|
};
|
|
|
|
/*
|
|
* QUAD PIPELINE START
|
|
*/
|
|
|
|
Asset quad_vert_shader = apLoad("shaders/quad.vert");
|
|
Asset quad_frag_shader = apLoad("shaders/quad.frag");
|
|
|
|
VkShaderModule cube_vert, cube_frag;
|
|
success &= vShaderModuleInit(quad_vert_shader.bytes, quad_vert_shader.len, &cube_vert);
|
|
success &= vShaderModuleInit(quad_frag_shader.bytes, quad_frag_shader.len, &cube_frag);
|
|
|
|
VkPipelineShaderStageCreateInfo cube_shader_stages[] = {
|
|
{
|
|
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
|
|
.stage = VK_SHADER_STAGE_VERTEX_BIT,
|
|
.module = cube_vert,
|
|
.pName = "main",
|
|
},
|
|
{
|
|
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
|
|
.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
|
|
.module = cube_frag,
|
|
.pName = "main",
|
|
},
|
|
};
|
|
|
|
g_CUBE_Create_Info.pStages = cube_shader_stages;
|
|
g_CUBE_Create_Info.stageCount = Len(cube_shader_stages);
|
|
g_CUBE_Create_Info.layout = v_Renderer.handles.pipeline_layout;
|
|
g_CUBE_Create_Info.pNext = &pipeline_render_info;
|
|
|
|
result = vkCreateGraphicsPipelines(device, 0, 1, &g_CUBE_Create_Info, NULL, &v_Renderer.handles.pipelines[rPIPELINE_CUBE]);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printf("vkCreateGraphicsPipelines failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
vkDestroyShaderModule(device, cube_vert, NULL);
|
|
vkDestroyShaderModule(device, cube_frag, NULL);
|
|
|
|
apUnload("shaders/quad.vert");
|
|
apUnload("shaders/quad.frag");
|
|
|
|
/*
|
|
* GUI PIPELINE START
|
|
*/
|
|
|
|
Asset gui_vert_shader = apLoad("shaders/gui.vert");
|
|
Asset gui_frag_shader = apLoad("shaders/gui.frag");
|
|
|
|
VkShaderModule gui_vert, gui_frag;
|
|
success &= vShaderModuleInit(gui_vert_shader.bytes, gui_vert_shader.len, &gui_vert);
|
|
success &= vShaderModuleInit(gui_frag_shader.bytes, gui_frag_shader.len, &gui_frag);
|
|
|
|
VkPipelineShaderStageCreateInfo gui_shader_stages[] = {
|
|
{
|
|
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
|
|
.stage = VK_SHADER_STAGE_VERTEX_BIT,
|
|
.module = gui_vert,
|
|
.pName = "main",
|
|
},
|
|
{
|
|
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
|
|
.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
|
|
.module = gui_frag,
|
|
.pName = "main",
|
|
},
|
|
};
|
|
|
|
g_GUI_Create_Info.pStages = gui_shader_stages;
|
|
g_GUI_Create_Info.stageCount = Len(gui_shader_stages);
|
|
g_GUI_Create_Info.layout = v_Renderer.handles.pipeline_layout;
|
|
g_GUI_Create_Info.pNext = &pipeline_render_info;
|
|
|
|
result = vkCreateGraphicsPipelines(device, 0, 1, &g_GUI_Create_Info, NULL, &v_Renderer.handles.pipelines[rPIPELINE_GUI]);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkCreateGraphicsPipelines failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
vkDestroyShaderModule(device, gui_vert, NULL);
|
|
vkDestroyShaderModule(device, gui_frag, NULL);
|
|
|
|
apUnload("shaders/gui.vert");
|
|
apUnload("shaders/gui.frag");
|
|
|
|
/*
|
|
* PBR PIPELINE START
|
|
*/
|
|
|
|
Asset pbr_vert_shader = apLoad("shaders/pbr.vert");
|
|
Asset pbr_frag_shader = apLoad("shaders/pbr.frag");
|
|
|
|
VkShaderModule pbr_vert, pbr_frag;
|
|
success &= vShaderModuleInit(pbr_vert_shader.bytes, pbr_vert_shader.len, &pbr_vert);
|
|
success &= vShaderModuleInit(pbr_frag_shader.bytes, pbr_frag_shader.len, &pbr_frag);
|
|
|
|
VkPipelineShaderStageCreateInfo pbr_shader_stages[] = {
|
|
{
|
|
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
|
|
.stage = VK_SHADER_STAGE_VERTEX_BIT,
|
|
.module = pbr_vert,
|
|
.pName = "main",
|
|
},
|
|
{
|
|
.sType = STYPE(PIPELINE_SHADER_STAGE_CREATE_INFO),
|
|
.stage = VK_SHADER_STAGE_FRAGMENT_BIT,
|
|
.module = pbr_frag,
|
|
.pName = "main",
|
|
},
|
|
};
|
|
|
|
g_PBR_Create_Info.pStages = pbr_shader_stages;
|
|
g_PBR_Create_Info.stageCount = Len(pbr_shader_stages);
|
|
g_PBR_Create_Info.layout = v_Renderer.handles.pipeline_layout;
|
|
g_PBR_Create_Info.pNext = &pipeline_render_info;
|
|
|
|
result = vkCreateGraphicsPipelines(device, 0, 1, &g_PBR_Create_Info, NULL, &v_Renderer.handles.pipelines[rPIPELINE_PBR]);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printfln("vkCreateGraphicsPipelineFailure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
vkDestroyShaderModule(device, pbr_vert, NULL);
|
|
vkDestroyShaderModule(device, pbr_frag, NULL);
|
|
|
|
apUnload("shaders/pbr.vert");
|
|
apUnload("shaders/pbr.frag");
|
|
|
|
return success;
|
|
}
|
|
|
|
static b32
|
|
vShaderModuleInit(u8 *bytes, u32 len, VkShaderModule *module)
|
|
{
|
|
VkResult result;
|
|
b32 success = true;
|
|
|
|
VkShaderModuleCreateInfo module_info = {
|
|
.sType = STYPE(SHADER_MODULE_CREATE_INFO),
|
|
.codeSize = len,
|
|
.pCode = (u32 *)bytes,
|
|
};
|
|
|
|
result = vkCreateShaderModule(v_Renderer.handles.device, &module_info, NULL, module);
|
|
if (result != VK_SUCCESS)
|
|
{
|
|
Printf("vkCreateShaderModule failure: %s", vVkResultStr(result));
|
|
success = false;
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
static b32
|
|
vBuffersInit()
|
|
{
|
|
vRBuffers *buf = &v_Renderer.buffers;
|
|
Arena *arena = v_Renderer.mem.perm_arena;
|
|
|
|
HashTableInit(&buf->buffers, 8);
|
|
HashTableInit(&buf->images, 8);
|
|
|
|
buf->tex_destroy_queue.data = MakeArray(arena, b8 *, FRAME_OVERLAP);
|
|
buf->tex_destroy_queue.length = FRAME_OVERLAP;
|
|
|
|
for (u32 i = 0; i < FRAME_OVERLAP; i++)
|
|
{
|
|
InitArrayType(buf->frame_buffers[i], arena, vBuffer *, 128);
|
|
InitArrayType(buf->frame_images[i], arena, vImageView *, 128);
|
|
|
|
buf->tex_destroy_queue.data[i] = MakeArray(arena, b8, TEXTURE_ASSET_MAX);
|
|
MemZero(buf->tex_destroy_queue.data[i], sizeof(b8) * TEXTURE_ASSET_MAX);
|
|
}
|
|
|
|
|
|
b32 success = true;
|
|
VkResult result;
|
|
|
|
if (success)
|
|
{
|
|
result = vBufferCreate(&buf->gui_vert.alloc, rRBT_VERTEX | rRBT_HOST, VERTEX_BUFFER_CAP);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
}
|
|
|
|
if (success)
|
|
{
|
|
result = vBufferCreate(&buf->gui_idx.alloc, rRBT_INDEX | rRBT_HOST, INDEX_BUFFER_CAP); // TODO: figure out ratio of memory alloc from vertex -> index
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
}
|
|
|
|
if (success)
|
|
{
|
|
result = vBufferCreate(&buf->transfer.alloc, rRBT_STAGING, TRANSFER_BUFFER_CAP);
|
|
if (result != VK_SUCCESS)
|
|
success = false;
|
|
}
|
|
|
|
buf->gui_vert.ptr = vMapBuffer(buf->gui_vert.alloc.alloc);
|
|
buf->gui_vert.cap = MB(32);
|
|
buf->gui_idx.ptr = vMapBuffer(buf->gui_idx.alloc.alloc);
|
|
buf->gui_idx.cap = MB(8);
|
|
buf->transfer.ptr = vMapBuffer(buf->transfer.alloc.alloc);
|
|
buf->transfer.cap = MB(64);
|
|
|
|
return success;
|
|
}
|
|
|
|
// Spawns the background asset-upload thread running vLoaderStart and
// stores its handle for later wake/suspend calls.
static void
vLoaderStartThreads()
{
    v_Renderer.async.thread = pThreadInit(vLoaderStart, NULL);
}
|
|
|
|
// ::Vulkan::Init::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Async::Functions::Start::
|
|
|
|
// Streams `count` queued transfers (images, raw buffers, meshes) to the
// GPU through the persistently-mapped staging buffer, using the
// immediate-submit command buffer on the transfer queue. Large payloads
// are split: when the staging buffer fills, the pending commands are
// submitted, the fence is waited on, and copying resumes from the start
// of the staging buffer. For vTT_MESH transfers a small vMesh uniform
// (buffer device address) is uploaded once before the vertex payload.
static void
vTransferUpload(vTransfer **transfers, u32 count)
{
    VkCommandPool pool = v_Renderer.imm.pool;  // NOTE(review): unused local
    VkCommandBuffer buffer = v_Renderer.imm.buffer;
    VkFence fence = v_Renderer.imm.fence;
    VkDevice device = v_Renderer.handles.device;
    VkQueue queue = v_Renderer.handles.tfer_queue;
    vMappedBuffer *transfer = &v_Renderer.buffers.transfer;

    // Current source payload; NULL means "fetch from transfers[i]".
    rawptr data_ptr = NULL;
    u64 data_len = 0;

    rawptr ptr = transfer->ptr;       // write cursor into staging memory
    u64 ptr_pos = 0;                  // bytes consumed of staging capacity
    u64 transfer_size = 0;            // bytes copied this iteration
    VkDeviceSize offset = 0;          // source offset for copy commands
    b32 imm_started = false;
    b32 mesh_uniform_uploaded = false;
    u32 i = 0;
    for (;;)
    {
        if (i == count)
            break;

        // Start a new payload: image size is w*h*channels, otherwise the
        // caller-provided byte size.
        if (data_ptr == NULL)
        {
            if (transfers[i]->type == vTT_IMAGE)
            {
                data_len = transfers[i]->w * transfers[i]->h * transfers[i]->ch;
            }
            else
            {
                data_len = transfers[i]->size;
            }

            data_ptr = transfers[i]->data;
        }

        // Staging buffer exhausted: flush pending copies and rewind.
        if (ptr_pos == transfer->cap)
        {
            vImmSubmitFinish(device, fence, buffer, queue);
            vkWaitForFences(device, 1, &fence, VK_TRUE, 999999999);

            ptr = transfer->ptr;
            offset = 0;
            imm_started = false;
            // NOTE(review): ptr_pos is NOT reset here, so on the next
            // iteration this branch fires again with nothing consumed
            // (remaining == 0 below) — looks like an infinite-flush bug
            // once the staging buffer wraps. Confirm and reset ptr_pos.
        }

        if (!imm_started)
        {
            Assert(vImmSubmitBegin(device, fence, buffer), "vTransferUpload failure: vImmSubmitBegin failed"); // TODO: handle this
            imm_started = true;
        }

        // For meshes, upload the per-mesh uniform (the vertex buffer's
        // device address) once per transfer, ahead of the payload bytes.
        if (!mesh_uniform_uploaded && transfers[i]->type == vTT_MESH)
        {
            VkBufferDeviceAddressInfo addr_info = {
                .sType = STYPE(BUFFER_DEVICE_ADDRESS_INFO),
                .buffer = transfers[i]->mesh->mesh.buffer,
            };

            vMesh mesh = {
                .vertices = vkGetBufferDeviceAddress(device, &addr_info),
            };

            VkBufferCopy copy = {
                .srcOffset = offset,
                .dstOffset = cast(VkDeviceSize, 0),
                .size = sizeof(vMesh),
            };

            MemCpy(ptr, &mesh, sizeof(vMesh));

            ptr = PtrAdd(ptr, sizeof(vMesh));
            ptr_pos += sizeof(vMesh);
            offset += sizeof(vMesh);

            vkCmdCopyBuffer(buffer, transfer->alloc.buffer, transfers[i]->mesh->uniform.buffer, 1, &copy);

            mesh_uniform_uploaded = true;
        }

        // Copy as much of the payload as fits in the remaining staging
        // capacity; leftovers are handled on the next loop iteration.
        if (transfers[i]->type != vTT_NONE)
        {
            u64 remaining = Diff(TRANSFER_BUFFER_CAP, ptr_pos);
            transfer_size = data_len;
            if (transfer_size > remaining)
                transfer_size = remaining;

            MemCpy(ptr, data_ptr, transfer_size);

            ptr = PtrAdd(ptr, transfer_size);
            PtrAddAdjustLen(data_ptr, data_len, transfer_size);
            ptr_pos += transfer_size;
        }

        // TODO:
        /* == Transfers ==
         * - offsets for target buffer
         * - batch copy commands where possible
         */
        if (transfers[i]->type == vTT_IMAGE)
        {
            VkBufferImageCopy copy = {
                .bufferRowLength = transfers[i]->w,
                .bufferImageHeight = transfers[i]->h,
                .imageSubresource = {
                    .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
                    .layerCount = 1,
                },
                .imageExtent = {
                    .width = transfers[i]->w,
                    .height = transfers[i]->h,
                    .depth = 1,
                },
                .bufferOffset = offset,
            };

            // UNDEFINED -> TRANSFER_DST, copy, then -> SHADER_READ_ONLY.
            vImageTransitionLayout(buffer,
                transfers[i]->image,
                VK_IMAGE_LAYOUT_UNDEFINED,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

            vkCmdCopyBufferToImage(buffer,
                transfer->alloc.buffer,
                transfers[i]->image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                1,
                &copy);

            vImageTransitionLayout(buffer,
                transfers[i]->image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

            offset += (VkDeviceSize)transfer_size;
        }
        else if (transfers[i]->type == vTT_BUFFER || transfers[i]->type == vTT_MESH)
        {
            VkBuffer target_buf;
            if (transfers[i]->type == vTT_BUFFER)
                target_buf = transfers[i]->buffer;
            else if (transfers[i]->type == vTT_MESH)
                target_buf = transfers[i]->mesh->mesh.buffer;

            // NOTE(review): dstOffset is always 0, so a payload split
            // across staging flushes would overwrite the destination's
            // start — matches the TODO above about target-buffer offsets.
            VkBufferCopy copy = {
                .srcOffset = offset,
                .dstOffset = 0,
                .size = transfer_size,
            };

            Printfln("transfer size %llu", transfer_size);

            vkCmdCopyBuffer(buffer, transfer->alloc.buffer, target_buf, 1, &copy);

            offset += cast(VkDeviceSize, transfer_size);
        }

        // Payload fully consumed: advance to the next transfer.
        if (data_len == 0)
        {
            data_ptr = NULL;
            data_len = 0;
            transfer_size = 0;
            i += 1;
            mesh_uniform_uploaded = false;
        }
    }

    // Submit whatever commands are still pending. Note this final submit
    // is not waited on here; presumably the caller/fence handles that.
    if (imm_started)
        vImmSubmitFinish(device, fence, buffer, queue);
}
|
|
|
|
#ifdef __linux__
|
|
|
|
// pthread entry point for the upload thread: runs the loader loop and
// exits the thread when it returns. The argument is unused.
void *
vLoaderStart(void *i)
{
    vLoader();
    pthread_exit(NULL);
}
|
|
|
|
#elif _WIN32
|
|
|
|
// Win32 thread entry point for the upload thread: runs the loader loop
// and returns 0 on completion. The argument is unused.
DWORD WINAPI
vLoaderStart(LPVOID thread_data)
{
    vLoader();
    return 0;
}
|
|
|
|
#endif
|
|
|
|
// Resumes the upload thread (see vLoader, which suspends itself when the
// job queue is empty).
static void
vLoaderWake()
{
    pThreadWake(&v_Renderer.async.thread);
}
|
|
|
|
// Upload-thread main loop: under the upload mutex, snapshots the pending
// transfer jobs, marks them unqueued, then uploads them outside the lock
// via vTransferUpload and marks them completed. When the queue is empty
// the thread flags itself sleeping and suspends until vLoaderWake.
static void
vLoader()
{
    pThread *self = &v_Renderer.async.thread;

    for (;;)
    {
        TicketMutLock(&v_Renderer.upload.mut);
        u32 job_count = JobQueueGetCount(&v_Renderer.upload.job_queue);
        if (job_count > 0)
        {
            // Snapshot the job pointers so the mutex can be released
            // before the (slow) GPU upload. Scratch comes from the
            // current frame arena.
            vTransfer **transfers = MakeArray(vFrameArena(), vTransfer *, job_count);

            u32 unqueued_count = 0;
            for (u32 i = 0; i < job_count; i++)
            {
                unqueued_count += 1;
                transfers[i] = v_Renderer.upload.transfers[i];
            }

            JobQueueMarkUnqueued(&v_Renderer.upload.job_queue, unqueued_count);

            TicketMutUnlock(&v_Renderer.upload.mut);

            vTransferUpload(transfers, job_count);

            for (u32 i = 0; i < job_count; i++)
            {
                if (transfers[i]->type == vTT_IMAGE)
                    //apUnloadTexture(transfers[i]->asset_id);
                    ;
            }

            JobQueueMarkCompleted(&v_Renderer.upload.job_queue, job_count);
        }
        else if (job_count == 0)
        {
            // Publish "sleeping" before releasing the lock so producers
            // that enqueue afterwards know to call vLoaderWake.
            pAtomicStoreB32(&v_Renderer.async.sleeping, 1);
            TicketMutUnlock(&v_Renderer.upload.mut);
            pThreadSuspend(self);
            pAtomicStoreB32(&v_Renderer.async.sleeping, 0);
        }
        else
        {
            // NOTE(review): job_count is u32, so this branch is
            // unreachable — possibly intended for a negative "shutdown"
            // sentinel from JobQueueGetCount; confirm its return type.
            TicketMutUnlock(&v_Renderer.upload.mut);
            pThreadKill();
        }
    }

    pThreadKill();
}
|
|
|
|
// ::Vulkan::Async::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::CleanUp::Functions::Start::
|
|
|
|
static void
|
|
vSwapchainDestroy()
|
|
{
|
|
vImageViewArray images = v_Renderer.images.sc;
|
|
VkDevice device = v_Renderer.handles.device;
|
|
VkSwapchainKHR swapchain = v_Renderer.handles.swapchain;
|
|
|
|
for (u32 i = 0; i < images.length; i++)
|
|
{
|
|
vkDestroyImageView(device, images.data[i].view, NULL);
|
|
}
|
|
|
|
vkDestroySwapchainKHR(device, swapchain, NULL);
|
|
}
|
|
|
|
static void
|
|
vDrawImagesDestroy()
|
|
{
|
|
vRImages *images = &v_Renderer.images;
|
|
VkDevice device = v_Renderer.handles.device;
|
|
VmaAllocator vma_alloc = v_Renderer.handles.vma_alloc;
|
|
|
|
vkDestroyImageView(device, images->draw.view, NULL);
|
|
vmaDestroyImage(vma_alloc, images->draw.image.image, images->draw.image.alloc);
|
|
|
|
vkDestroyImageView(device, images->depth.view, NULL);
|
|
vmaDestroyImage(vma_alloc, images->depth.image.image, images->depth.image.alloc);
|
|
}
|
|
|
|
// ::Vulkan::CleanUp::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Logging::Functions::Start::
|
|
|
|
// Logs an informational message with the renderer's [INFO] prefix.
void
vInfo(const char *str)
{
    Printfln("[INFO] %s", str);
}
|
|
|
|
// Logs a warning message with the renderer's [WARN] prefix.
void
vWarn(const char *str)
{
    Printfln("[WARN] %s", str);
}
|
|
|
|
// Logs an error message with the renderer's [ERROR] prefix.
void
vError(const char *str)
{
    Printfln("[ERROR] %s", str);
}
|
|
|
|
// ::Vulkan::Logging::Functions::End::
|
|
|
|
|
|
|
|
// ::Vulkan::Debug::Functions::Start::
|
|
|
|
const char *
|
|
vVkResultStr(VkResult result)
|
|
{
|
|
switch (result)
|
|
{
|
|
case VK_SUCCESS:
|
|
return "VK_SUCCESS";
|
|
case VK_NOT_READY:
|
|
return "VK_NOT_READY";
|
|
case VK_TIMEOUT:
|
|
return "VK_TIMEOUT";
|
|
case VK_EVENT_SET:
|
|
return "VK_EVENT_SET";
|
|
case VK_EVENT_RESET:
|
|
return "VK_EVENT_RESET";
|
|
case VK_INCOMPLETE:
|
|
return "VK_INCOMPLETE";
|
|
case VK_ERROR_OUT_OF_HOST_MEMORY:
|
|
return "VK_ERROR_OUT_OF_HOST_MEMORY";
|
|
case VK_ERROR_OUT_OF_DEVICE_MEMORY:
|
|
return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
|
|
case VK_ERROR_INITIALIZATION_FAILED:
|
|
return "VK_ERROR_INITIALIZATION_FAILED";
|
|
case VK_ERROR_DEVICE_LOST:
|
|
return "VK_ERROR_DEVICE_LOST";
|
|
case VK_ERROR_MEMORY_MAP_FAILED:
|
|
return "VK_ERROR_MEMORY_MAP_FAILED";
|
|
case VK_ERROR_LAYER_NOT_PRESENT:
|
|
return "VK_ERROR_LAYER_NOT_PRESENT";
|
|
case VK_ERROR_EXTENSION_NOT_PRESENT:
|
|
return "VK_ERROR_EXTENSION_NOT_PRESENT";
|
|
case VK_ERROR_FEATURE_NOT_PRESENT:
|
|
return "VK_ERROR_FEATURE_NOT_PRESENT";
|
|
case VK_ERROR_INCOMPATIBLE_DRIVER:
|
|
return "VK_ERROR_INCOMPATIBLE_DRIVER";
|
|
case VK_ERROR_TOO_MANY_OBJECTS:
|
|
return "VK_ERROR_TOO_MANY_OBJECTS";
|
|
case VK_ERROR_FORMAT_NOT_SUPPORTED:
|
|
return "VK_ERROR_FORMAT_NOT_SUPPORTED";
|
|
case VK_ERROR_FRAGMENTED_POOL:
|
|
return "VK_ERROR_FRAGMENTED_POOL";
|
|
case VK_ERROR_UNKNOWN:
|
|
return "VK_ERROR_UNKNOWN";
|
|
case VK_ERROR_OUT_OF_POOL_MEMORY:
|
|
return "VK_ERROR_OUT_OF_POOL_MEMORY";
|
|
case VK_ERROR_INVALID_EXTERNAL_HANDLE:
|
|
return "VK_ERROR_INVALID_EXTERNAL_HANDLE";
|
|
case VK_ERROR_FRAGMENTATION:
|
|
return "VK_ERROR_FRAGMENTATION";
|
|
case VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS:
|
|
return "VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS";
|
|
case VK_PIPELINE_COMPILE_REQUIRED:
|
|
return "VK_PIPELINE_COMPILE_REQUIRED";
|
|
case VK_ERROR_SURFACE_LOST_KHR:
|
|
return "VK_ERROR_SURFACE_LOST_KHR";
|
|
case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:
|
|
return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
|
|
case VK_SUBOPTIMAL_KHR:
|
|
return "VK_SUBOPTIMAL_KHR";
|
|
case VK_ERROR_OUT_OF_DATE_KHR:
|
|
return "VK_ERROR_OUT_OF_DATE_KHR";
|
|
case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:
|
|
return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
|
|
case VK_ERROR_VALIDATION_FAILED_EXT:
|
|
return "VK_ERROR_VALIDATION_FAILED_EXT";
|
|
case VK_ERROR_INVALID_SHADER_NV:
|
|
return "VK_ERROR_INVALID_SHADER_NV";
|
|
#ifdef VK_ENABLE_BETA_EXTENSIONS
|
|
case VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR:
|
|
return "VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR";
|
|
#endif
|
|
#ifdef VK_ENABLE_BETA_EXTENSIONS
|
|
case VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR:
|
|
return "VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR";
|
|
#endif
|
|
#ifdef VK_ENABLE_BETA_EXTENSIONS
|
|
case VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR:
|
|
return "VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR";
|
|
#endif
|
|
#ifdef VK_ENABLE_BETA_EXTENSIONS
|
|
case VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR:
|
|
return "VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR";
|
|
#endif
|
|
#ifdef VK_ENABLE_BETA_EXTENSIONS
|
|
case VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR:
|
|
return "VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR";
|
|
#endif
|
|
#ifdef VK_ENABLE_BETA_EXTENSIONS
|
|
case VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR:
|
|
return "VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR";
|
|
#endif
|
|
case VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT:
|
|
return "VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT";
|
|
case VK_ERROR_NOT_PERMITTED_KHR:
|
|
return "VK_ERROR_NOT_PERMITTED_KHR";
|
|
case VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT:
|
|
return "VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT";
|
|
case VK_THREAD_IDLE_KHR:
|
|
return "VK_THREAD_IDLE_KHR";
|
|
case VK_THREAD_DONE_KHR:
|
|
return "VK_THREAD_DONE_KHR";
|
|
case VK_OPERATION_DEFERRED_KHR:
|
|
return "VK_OPERATION_DEFERRED_KHR";
|
|
case VK_OPERATION_NOT_DEFERRED_KHR:
|
|
return "VK_OPERATION_NOT_DEFERRED_KHR";
|
|
case VK_ERROR_COMPRESSION_EXHAUSTED_EXT:
|
|
return "VK_ERROR_COMPRESSION_EXHAUSTED_EXT";
|
|
case VK_RESULT_MAX_ENUM:
|
|
return "VK_RESULT_MAX_ENUM";
|
|
default:
|
|
return "??????";
|
|
}
|
|
}
|
|
|
|
static VKAPI_ATTR VkBool32
|
|
vDebugCallback(
|
|
VkDebugUtilsMessageSeverityFlagBitsEXT message_severity,
|
|
VkDebugUtilsMessageTypeFlagsEXT message_type,
|
|
const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData,
|
|
void *pUserData
|
|
)
|
|
{
|
|
char *ms, *mt;
|
|
|
|
switch (message_severity) {
|
|
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
|
|
ms = (char *)"VERBOSE";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
|
|
ms = (char *)"INFO";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
|
|
ms = (char *)"WARNING";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
|
|
ms = (char *)"ERROR";
|
|
break;
|
|
default:
|
|
ms = (char *)"UNKNOWN";
|
|
break;
|
|
}
|
|
|
|
switch (message_type) {
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
|
|
mt = (char *)"General";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT:
|
|
mt = (char *)"Validation";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
|
|
mt = (char *)"Validation | General";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
|
|
mt = (char *)"Performance";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
|
|
mt = (char *)"General | Performance";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT:
|
|
mt = (char *)"Validation | Performance";
|
|
break;
|
|
case VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT:
|
|
mt = (char *)"General | Validation | Performance";
|
|
break;
|
|
default:
|
|
mt = (char *)"Unknown";
|
|
break;
|
|
}
|
|
|
|
Printf("[%s: %s]\n%s\n", ms, mt, pCallbackData->pMessage);
|
|
|
|
return VK_FALSE;
|
|
}
|
|
|
|
static b32
|
|
vValidationSupported()
|
|
{
|
|
b32 success = false;
|
|
Arena *arena = vFrameArena();
|
|
|
|
u32 count;
|
|
vkEnumerateInstanceLayerProperties(&count, NULL);
|
|
Assert(count, "vValidationSupported(): vkEnumerateInstanceLayerProperties returned a count of 0");
|
|
|
|
VkLayerProperties *layers = ArenaAlloc(arena, sizeof(VkLayerProperties) * count);
|
|
vkEnumerateInstanceLayerProperties(&count, layers);
|
|
|
|
for (u32 i = 0; i < count; i++) {
|
|
if (StrEq(layers[i].layerName, _VK_VALIDATION)) {
|
|
success = true;
|
|
break;
|
|
}
|
|
}
|
|
|
|
return success;
|
|
}
|
|
|
|
// ::Vulkan::Debug::Functions::End::
|