Gears/src/gears/vulkan.d

3052 lines
77 KiB
D

import vulkan_funcs;
import vulkan_logging;
import aliases;
import std.stdio;
import std.algorithm.comparison;
import core.stdc.string : strcmp, memcpy;
import std.format : sformat;
import util;
import alloc;
import platform;
import assets;
import renderer;
import std.math.rounding : Ceil = ceil;
import math;
// Runtime capability flags; false until set during instance setup
// (the writer is not visible in this chunk — TODO confirm).
bool g_VLAYER_SUPPORT = false;
bool g_DEBUG_PRINTF = false;
// Number of frames recorded in parallel (double buffering).
const FRAME_OVERLAP = 2;
// Candidate Vulkan loader library names, tried in order per platform.
version(linux)
{
const string[] VULKAN_LIBS = [
"libvulkan.so.1",
"libvulkan.so",
];
}
version(Windows)
{
const string[] VULKAN_LIBS = [
"vulkan-1.dll",
];
}
// Instance layers: none by default, Khronos validation for debug setups.
const char*[] VK_INSTANCE_LAYERS = [];
const char*[] VK_INSTANCE_LAYERS_DEBUG = [ "VK_LAYER_KHRONOS_validation" ];
// Instance extensions: surface + platform-specific surface; debug builds
// additionally request debug utils.
version(linux)
{
const char*[] VK_INSTANCE_EXT = [ cast(char*)VK_KHR_SURFACE_EXTENSION_NAME, cast(char*)VK_KHR_XCB_SURFACE_EXTENSION_NAME ];
const char*[] VK_INSTANCE_EXT_DEBUG = VK_INSTANCE_EXT ~ [ cast(char*)VK_EXT_DEBUG_UTILS_EXTENSION_NAME ];
}
version(Windows)
{
const char*[] VK_INSTANCE_EXT = [ cast(char*)VK_KHR_SURFACE_EXTENSION_NAME, cast(char*)VK_KHR_WIN32_SURFACE_EXTENSION_NAME ];
const char*[] VK_INSTANCE_EXT_DEBUG = VK_INSTANCE_EXT ~ [ cast(char*)VK_EXT_DEBUG_UTILS_EXTENSION_NAME ];
}
// Device extensions required on every configuration, plus the AMD-only
// shader-info extension used by debug AMD builds.
const char*[] VK_BASE_DEVICE_EXTENSIONS = [
cast(char*)VK_KHR_SWAPCHAIN_EXTENSION_NAME,
cast(char*)VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME,
cast(char*)VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
];
const char*[] VK_AMD_DEVICE_EXTENSIONS = [
cast(char*)VK_AMD_SHADER_INFO_EXTENSION_NAME,
];
// Device extension selection: AMD_GPU debug builds add the AMD shader-info
// extensions on top of the base set; every other configuration uses the
// base set alone.
version(AMD_GPU)
{
debug
{
const char*[] VK_DEVICE_EXTENSIONS = VK_AMD_DEVICE_EXTENSIONS ~ VK_BASE_DEVICE_EXTENSIONS;
}
else
{
// Bug fix: release AMD_GPU builds previously defined no VK_DEVICE_EXTENSIONS
// at all — the trailing `else` bound to `version(AMD_GPU)`, not to `debug`,
// so the non-debug AMD path fell through with nothing declared.
const char*[] VK_DEVICE_EXTENSIONS = VK_BASE_DEVICE_EXTENSIONS;
}
}
else
{
const char*[] VK_DEVICE_EXTENSIONS = VK_BASE_DEVICE_EXTENSIONS;
}
// Draw-image format candidates, best first — presumably the first supported
// one wins (selection code not visible here; confirm in CreateDrawImages).
const VkFormat[] VK_IMAGE_FORMATS = [
VK_FORMAT_R16G16B16A16_UNORM,
VK_FORMAT_R8G8B8A8_UNORM,
];
// The draw image is rendered to (color attachment), written by compute
// (storage), and blitted to/from (transfer src/dst).
const VkImageUsageFlags VK_DRAW_IMAGE_USAGE_FLAGS = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_STORAGE_BIT |
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
// Markers pushed onto Vulkan.cleanup_list as each init step completes
// (see Init) — presumably so teardown can unwind exactly what was built;
// the teardown routine is not visible in this chunk.
enum StepInitialized : u32
{
Renderer = 1,
Instance,
Debug,
Surface,
Device,
Vma,
Buffers,
FrameStructures,
Swapchain,
DrawImages,
Descriptors,
Pipelines,
}
alias SI = StepInitialized;
// Descriptor set slots. Max doubles as the count: DT.max == Max == 3 sizes
// the desc_sets/desc_layouts/desc_bindings arrays in Vulkan.
enum DescType : u32
{
Shared = 0,
SampledImage,
Material,
Max,
}
alias DT = DescType;
// A VMA-backed VkImage plus the bookkeeping the renderer tracks for it.
struct Image
{
VkImage image;
VmaAllocation alloc;
VkFormat format;
// Last layout the image was transitioned to (see Transition call sites).
VkImageLayout layout;
}
// Image paired with a VkImageView; `alias this` lets it be passed
// anywhere an Image is expected.
struct ImageView
{
Image base;
VkImageView view;
alias base this;
}
// Buffer whose memory stays persistently mapped as a typed slice.
struct MappedBuffer(T)
{
Buffer base;
// Mapped memory; valid for the buffer's lifetime (see CreateMappedBuffer).
T[] data;
u64 offset;
alias base this;
}
// A VMA-backed VkBuffer and its size in bytes.
struct Buffer
{
VkBuffer buffer;
VmaAllocation alloc;
u64 size;
}
// Free-list of descriptor slot indices plus a name -> slot table so named
// resources can reuse their binding (see the Pop/Push overloads below).
struct DescBindings
{
// Stack of free slot indices; `count` is the number of live entries.
u32[] free;
u64 count;
HashTable!(string, u32) lookup_table;
}
// Central renderer state: one instance owns every Vulkan object this module
// creates. cleanup_list records which init steps completed (see Init).
struct Vulkan
{
// Permanent allocations plus one scratch arena per in-flight frame.
Arena arena;
Arena[FRAME_OVERLAP] frame_arenas;
u32 frame_index;
u32 semaphore_index;
SLList!(SI) cleanup_list;
PlatformWindow* window;
// Core handles, created during Init in roughly this order.
VkDebugUtilsMessengerEXT dbg_msg;
VkInstance instance;
VkSurfaceKHR surface;
VkPhysicalDevice physical_device;
VkDevice device;
VmaAllocator vma;
// Swapchain state.
VkSwapchainKHR swapchain;
VkSurfaceFormatKHR surface_format;
VkPresentModeKHR present_mode;
VkExtent3D swapchain_extent;
ImageView[] present_images;
// Swapchain image acquired for the frame currently being recorded.
u32 image_index;
// Offscreen targets; draw_image is blitted to the swapchain in FinishFrame.
ImageView draw_image;
ImageView depth_image;
// Per-frame graphics command recording plus a dedicated compute set.
VkCommandPool[FRAME_OVERLAP] cmd_pools;
VkCommandBuffer[FRAME_OVERLAP] cmds;
VkCommandPool comp_cmd_pool;
VkCommandBuffer comp_cmd;
VkFence comp_fence;
// Sync: submit_sems is indexed by swapchain image (see FinishFrame);
// acquire_sems and render_fences are indexed by frame slot.
VkSemaphore[] submit_sems;
VkSemaphore[FRAME_OVERLAP] acquire_sems;
VkFence[FRAME_OVERLAP] render_fences;
// Immediate-submit path used by the staging Transfer helpers (ImmSubmit).
VkCommandPool imm_pool;
VkCommandBuffer imm_cmd;
VkFence imm_fence;
// Descriptor sets, one slot per DescType (DT.max == 3).
VkDescriptorPool desc_pool;
VkDescriptorSet[DT.max] desc_sets;
VkDescriptorSetLayout[DT.max] desc_layouts;
DescBindings[DT.max] desc_bindings;
VkSampler nearest_sampler;
// Per-frame cache used to skip redundant pipeline binds (see Bind).
VkPipeline[FRAME_OVERLAP] last_pipeline;
VkPipelineLayout pipeline_layout;
// Persistently mapped buffers: staging, UI geometry, uniform blocks.
MappedBuffer!(u8) transfer_buf;
MappedBuffer!(UIVertex) ui_vert_buf;
MappedBuffer!(u32) ui_index_buf;
MappedBuffer!(GlobalUniforms) global_buf;
MappedBuffer!(ShaderUniforms) shader_buf;
QueueInfo queues;
// Compute pipelines that expand 1/2/3-channel uploads to RGBA
// (see InitConversionPipeline and the data-upload CreateImageView).
PipelineHandle r_to_rgba_pipeline;
PipelineHandle rg_to_rgba_pipeline;
PipelineHandle rgb_to_rgba_pipeline;
VkDescriptorSet conv_desc_set;
VkDescriptorSetLayout conv_desc_layout;
VkPipelineLayout conv_pipeline_layout;
VkPipeline last_comp_pipeline;
bool compute_pass_started;
// When true, BeginRender clears the color attachment with clear_color.
bool enable_clear_color;
VkClearColorValue clear_color;
// Exposes gfx/tfer queue members directly on Vulkan (e.g. vk.tfer_queue).
alias queues this;
}
// Push constants for the channel-expansion compute shader: image dimensions.
struct ConvPushConst
{
u32 x;
u32 y;
}
// A pipeline plus the bind point (graphics or compute) it must be bound with.
struct PipelineHandle
{
VkPipeline handle;
VkPipelineBindPoint type;
}
// Chosen queue families; single_queue presumably set when graphics and
// transfer share one family (writer not visible here — confirm).
struct QueueInfo
{
i32 gfx_index, tfer_index;
VkQueue gfx_queue, tfer_queue;
bool single_queue;
}
// Builds the whole renderer: loads the Vulkan loader, then creates instance,
// surface, device, VMA, swapchain, draw images, frame structures, descriptors,
// buffers, and the conversion pipelines — each step only if all prior steps
// succeeded. Each step pushes an SI marker onto cleanup_list (presumably
// consumed by a teardown routine elsewhere — not visible in this chunk).
// On failure the partially-built Vulkan is still returned with ok=false.
Result!(Vulkan)
Init(PlatformWindow* window, u64 permanent_mem, u64 frame_mem)
{
bool success = true;
Vulkan vk = {
arena: CreateArena(permanent_mem),
frame_arenas: [
CreateArena(frame_mem),
CreateArena(frame_mem),
],
window: window,
};
Push(&vk, SI.Renderer);
success = LoadGlobalFunctions();
if (success) success = InitInstance(&vk);
if (success)
{
LoadInstanceFunctions(&vk);
EnableVLayers(&vk);
}
if (success) success = InitSurface(&vk);
if (success) success = InitDevice(&vk);
if (success) success = InitVMA(&vk);
if (success) success = CreateSwapchain(&vk);
if (success) success = CreateDrawImages(&vk);
if (success) success = InitFrameStructures(&vk);
if (success) success = InitDescriptors(&vk);
// InitBuffers returns void (it asserts internally), so success is unchanged.
if (success) InitBuffers(&vk);
if (success) success = InitConversionPipeline(&vk);
Result!(Vulkan) result = {
ok: success,
value: vk,
};
if (!success)
{
Logf("error initializing vulkan");
}
return result;
}
// Builds the descriptor layout, descriptor set, pipeline layout, and the
// three compute pipelines that expand 1-, 2-, and 3-channel source data to
// RGBA. Returns false (after VkCheck logging) on the first Vulkan error.
bool
InitConversionPipeline(Vulkan* vk)
{
    Push(vk, SI.Pipelines);
    // Binding 0: destination storage image; binding 1: raw source bytes.
    VkDescriptorSetLayoutBinding[] layout_bindings = [
        { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
        { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
    ];
    // One flag entry per binding above. Bug fix: this was an empty dynamic
    // array, so `binding_flags[] = ...` assigned zero elements and
    // bindingCount was 0 — UPDATE_AFTER_BIND was never applied to the
    // individual bindings despite the pool-level flag below.
    VkDescriptorBindingFlags[2] binding_flags = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT;
    VkDescriptorSetLayoutBindingFlagsCreateInfo flag_info = {
        sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
        bindingCount: cast(u32)binding_flags.length,
        pBindingFlags: binding_flags.ptr,
    };
    VkDescriptorSetLayoutCreateInfo set_info = {
        sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
        pNext: &flag_info,
        bindingCount: cast(u32)layout_bindings.length,
        pBindings: layout_bindings.ptr,
        flags: VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
    };
    VkResult result = vkCreateDescriptorSetLayout(vk.device, &set_info, null, &vk.conv_desc_layout);
    bool success = VkCheck("InitConversionPipeline failure: vkCreateDescriptorSetLayout error", result);
    if (success)
    {
        // A single set is enough: it is rewritten (update-after-bind) per
        // conversion in WriteConvDescriptor.
        VkDescriptorSetAllocateInfo alloc_info = {
            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
            descriptorSetCount: 1,
            pSetLayouts: &vk.conv_desc_layout,
            descriptorPool: vk.desc_pool,
        };
        result = vkAllocateDescriptorSets(vk.device, &alloc_info, &vk.conv_desc_set);
        success = VkCheck("InitConversionPipeline failure: vkAllocateDescriptorSets error", result);
    }
    if (success)
    {
        // Fix: VkPushConstantRange.size is a u32, not a VkDeviceSize.
        VkPushConstantRange const_range = {
            offset: 0,
            size: cast(u32)ConvPushConst.sizeof,
            stageFlags: VK_SHADER_STAGE_COMPUTE_BIT,
        };
        VkPipelineLayoutCreateInfo layout_info = {
            sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
            setLayoutCount: 1,
            pSetLayouts: &vk.conv_desc_layout,
            pushConstantRangeCount: 1,
            pPushConstantRanges: &const_range,
        };
        result = vkCreatePipelineLayout(vk.device, &layout_info, null, &vk.conv_pipeline_layout);
        success = VkCheck("InitConversionPipeline failure: vkCreatePipelineLayout error", result);
    }
    if (success)
    {
        // The channel count is baked into each pipeline as specialization
        // constant 0; the value is read at CreateComputePipeline time, so
        // mutating `channels` between calls is safe.
        u32 channels = 1;
        CompPipelineInfo conv_info = {
            shader: "shaders/convert.comp.spv",
            layout: &vk.conv_pipeline_layout,
            spec: {
                data: &channels,
                size: u32.sizeof,
                entries: [
                    {
                        constantID: 0,
                        size: u32.sizeof,
                        offset: 0,
                    }
                ],
            },
        };
        vk.r_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);
        channels = 2;
        vk.rg_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);
        channels = 3;
        vk.rgb_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);
    }
    return success;
}
// Creates a VkBuffer of `size` bytes through VMA.
// host_visible requests mapped, host-coherent memory (staging/uniforms);
// otherwise the buffer is device-local and TRANSFER_DST is added so it can
// be filled from the staging buffer. Aborts via assert on allocation failure.
void
CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visible)
{
    assert(type != BT.None, "CreateBuffer failure: type is None");
    // Fix: keep the queue-family indices in stack storage that provably
    // outlives the vmaCreateBuffer call, instead of casting a GC-allocated
    // array literal to a pointer (a hidden per-call heap allocation whose
    // only reference was the raw cast pointer).
    u32[2] queue_indices = [cast(u32)vk.queues.gfx_index, cast(u32)vk.queues.tfer_index];
    VkBufferCreateInfo buffer_info = {
        sType: VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        usage: type,
        size: size,
    };
    VmaAllocationCreateInfo alloc_info = {
        usage: VMA_MEMORY_USAGE_UNKNOWN,
        flags: VMA_ALLOCATION_CREATE_MAPPED_BIT,
    };
    if (host_visible)
    {
        alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        alloc_info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    }
    else
    {
        // Device-local buffers are filled via the staging path.
        buffer_info.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
        alloc_info.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    }
    if (vk.queues.gfx_index != vk.queues.tfer_index)
    {
        // Distinct graphics/transfer families: share the buffer between them.
        buffer_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
        buffer_info.queueFamilyIndexCount = cast(u32)queue_indices.length;
        buffer_info.pQueueFamilyIndices = queue_indices.ptr;
    }
    VmaAllocationInfo vma_info;
    VkResult result = vmaCreateBuffer(vk.vma, &buffer_info, &alloc_info, &buf.buffer, &buf.alloc, &vma_info);
    // TODO: handle errors here then reallocate buffer
    assert(VkCheck("CreateBuffer failure: vmaCreateBuffer error", result), "CreateBuffer failure");
    buf.size = size;
}
// Creates the persistently mapped buffers: uniform blocks, the staging
// buffer, and the UI geometry buffers. Registers the step for cleanup.
void
InitBuffers(Vulkan* vk)
{
    Push(vk, SI.Buffers);
    vk.global_buf = CreateMappedBuffer!(GlobalUniforms)(vk, BT.Uniform, 1);
    vk.shader_buf = CreateMappedBuffer!(ShaderUniforms)(vk, BT.Uniform, 1);
    // 64 MiB staging area for uploads to device-local memory.
    vk.transfer_buf = CreateMappedBuffer!(u8)(vk, BT.Staging, MB(64));
    // 30 MiB each for UI vertices and indices, expressed as element counts.
    immutable u64 ui_bytes = MB(30);
    vk.ui_vert_buf = CreateMappedBuffer!(UIVertex)(vk, BT.Vertex, ui_bytes / UIVertex.sizeof);
    vk.ui_index_buf = CreateMappedBuffer!(u32)(vk, BT.Index, ui_bytes / u32.sizeof);
}
// Binds the persistent UI index and vertex buffers on the command buffer
// of the frame currently being recorded.
void
BindUIBuffers(Vulkan* vk)
{
    VkCommandBuffer cmd = vk.cmds[vk.frame_index];
    VkDeviceSize vert_offset = 0;
    vkCmdBindIndexBuffer(cmd, vk.ui_index_buf.buffer, 0, VK_INDEX_TYPE_UINT32);
    vkCmdBindVertexBuffers(cmd, 0, 1, &vk.ui_vert_buf.buffer, &vert_offset);
}
// Binds an arbitrary u32 index buffer and a vertex buffer (binding 0) on
// the current frame's command buffer.
void
BindBuffers(Vulkan* vk, Buffer* index_buffer, Buffer* vertex_buffer)
{
    VkCommandBuffer cmd = vk.cmds[vk.frame_index];
    VkDeviceSize vert_offset = 0;
    vkCmdBindIndexBuffer(cmd, index_buffer.buffer, 0, VK_INDEX_TYPE_UINT32);
    vkCmdBindVertexBuffers(cmd, 0, 1, &vertex_buffer.buffer, &vert_offset);
}
// Allocates a host-visible buffer sized for `count` elements of T and keeps
// its memory mapped for the buffer's lifetime.
MappedBuffer!(T)
CreateMappedBuffer(T)(Vulkan* vk, BufferType type, u64 count)
{
    MappedBuffer!(T) mapped;
    immutable u64 bytes = T.sizeof * count;
    CreateBuffer(vk, &mapped.base, type, bytes, true);
    mapped.data = MapBuffer!(T)(vk, &mapped.base, count);
    return mapped;
}
// Maps `buffer` and returns its memory as a slice of `count` T elements.
// Aborts via assert on map failure (same policy as CreateBuffer).
T[]
MapBuffer(T)(Vulkan* vk, Buffer* buffer, u64 count)
{
    void* ptr;
    // Bug fix: the result was previously discarded, so a failed map would
    // have silently produced a slice over a null pointer.
    VkResult result = vmaMapMemory(vk.vma, buffer.alloc, &ptr);
    assert(VkCheck("MapBuffer failure: vmaMapMemory error", result), "MapBuffer failure");
    return (cast(T*)ptr)[0 .. count];
}
// The swapchain image acquired for the frame currently being recorded.
VkImage
CurrentImage(Vulkan* vk)
{
    ImageView* present = &vk.present_images[vk.image_index];
    return present.image;
}
// Read/write view over the persistently mapped UI vertex memory.
UIVertex[]
GetUIVertexBuffer(Vulkan* vk)
{
    auto verts = vk.ui_vert_buf.data;
    return verts;
}
// Read/write view over the persistently mapped UI index memory.
u32[]
GetUIIndexBuffer(Vulkan* vk)
{
    auto indices = vk.ui_index_buf.data;
    return indices;
}
// Starts a frame: waits on (then resets) the frame-slot fence, acquires the
// next swapchain image into vk.image_index, and begins this slot's command
// buffer for one-time-submit recording.
void
BeginFrame(Vulkan* vk)
{
// TODO: move vkWaitForFences so it no longer holds up the frame, will need to change how fences are handled in regards to images though
VkResult result = vkWaitForFences(vk.device, 1, vk.render_fences.ptr + vk.frame_index, VK_TRUE, 1000000000);
VkCheckA("BeginFrame failure: vkWaitForFences error", result);
result = vkResetFences(vk.device, 1, vk.render_fences.ptr + vk.frame_index);
VkCheckA("BeginFrame failure: vkResetFences error", result);
result = vkAcquireNextImageKHR(vk.device, vk.swapchain, 1000000000, vk.acquire_sems[vk.frame_index], null, &vk.image_index);
if (result == VK_ERROR_OUT_OF_DATE_KHR)
{
// NOTE(review): after RecreateSwapchain the function continues recording
// with a possibly stale image_index and an unsignaled acquire semaphore —
// confirm this frame is safe to submit, or that callers skip it.
RecreateSwapchain(vk);
}
else if (result != VK_SUBOPTIMAL_KHR)
{
// SUBOPTIMAL still delivered a usable image; anything else is an error.
VkCheckA("BeginFrame failure: vkAcquireNextImageKHR error", result);
}
result = vkResetCommandBuffer(vk.cmds[vk.frame_index], 0);
VkCheckA("BeginFrame failure: vkResetCommandBuffer failure", result);
VkCommandBufferBeginInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
};
result = vkBeginCommandBuffer(vk.cmds[vk.frame_index], &cmd_info);
VkCheckA("BeginFrame failure: vkBeginCommandBuffer error", result);
}
// Transitions the offscreen color/depth targets and the acquired swapchain
// image, then begins dynamic rendering into draw_image + depth_image over
// the full swapchain extent.
void
BeginRender(Vulkan* vk)
{
// TODO: probably get rid of these
Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
Transition(vk.cmds[vk.frame_index], &vk.depth_image, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL);
// The swapchain image only receives the final blit (see FinishFrame), so it
// goes straight from UNDEFINED to TRANSFER_DST.
VkImage image = CurrentImage(vk);
Transition(vk.cmds[vk.frame_index], image, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
VkRenderingAttachmentInfo col_attach = {
sType: VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
imageView: vk.draw_image.view,
imageLayout: vk.draw_image.layout,
loadOp: (vk.enable_clear_color ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_LOAD), // CLEAR instead of LOAD if wanting to clear colors, also clearColor value (or whatever)
storeOp: VK_ATTACHMENT_STORE_OP_STORE,
clearValue: {
color: vk.clear_color,
},
};
// NOTE(review): depth clears with the default clearValue (depth 0.0) —
// confirm this matches the depth-compare direction in use.
VkRenderingAttachmentInfo depth_attach = {
sType: VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
imageView: vk.depth_image.view,
imageLayout: vk.depth_image.layout,
loadOp: VK_ATTACHMENT_LOAD_OP_CLEAR,
storeOp: VK_ATTACHMENT_STORE_OP_STORE,
};
VkRenderingInfo render_info = {
sType: VK_STRUCTURE_TYPE_RENDERING_INFO,
layerCount: 1,
colorAttachmentCount: 1,
pColorAttachments: &col_attach,
pDepthAttachment: &depth_attach,
renderArea: {
extent: {
width: vk.swapchain_extent.width,
height: vk.swapchain_extent.height,
},
},
};
vkCmdBeginRendering(vk.cmds[vk.frame_index], &render_info);
}
// Stores the color used when BeginRender clears the draw image.
// NOTE(review): this does not touch enable_clear_color — presumably the
// caller toggles that separately; confirm.
void
SetClearColor(Vulkan* vk, Vec4 color)
{
    vk.clear_color.float32 = [color.r, color.g, color.b, color.a];
}
// Puts the draw image into GENERAL layout so compute shaders can write it.
void
PrepComputeDrawImage(Vulkan* vk)
{
Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_GENERAL);
}
// Ends the render pass, blits draw_image into the acquired swapchain image,
// submits the frame's command buffer (waiting on the acquire semaphore,
// signaling the per-image submit semaphore and the per-frame fence), and
// presents. Handles swapchain invalidation by recreating it.
void
FinishFrame(Vulkan* vk)
{
scope(exit)
{
// Runs after submit/present: reset the pipeline-bind cache for this slot
// and advance to the next in-flight frame slot.
vk.last_pipeline[vk.frame_index] = null;
vk.frame_index = (vk.frame_index + 1) % FRAME_OVERLAP;
}
VkImage image = CurrentImage(vk);
// acquire_sems is indexed by frame slot, submit_sems by swapchain image.
VkSemaphore acquire_sem = vk.acquire_sems[vk.frame_index];
VkSemaphore submit_sem = vk.submit_sems[vk.image_index];
vkCmdEndRendering(vk.cmds[vk.frame_index]);
Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
VkExtent2D extent = {
width: vk.swapchain_extent.width,
height: vk.swapchain_extent.height,
};
// TODO: Find out how to copy from same dimension images (pretty sure its not blitting)
Copy(vk.cmds[vk.frame_index], &vk.draw_image.base, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, extent, extent);
Transition(vk.cmds[vk.frame_index], image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
VkResult result = vkEndCommandBuffer(vk.cmds[vk.frame_index]);
VkCheckA("FinishFrame failure: vkEndCommandBuffer error", result);
VkCommandBufferSubmitInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
commandBuffer: vk.cmds[vk.frame_index],
};
// Wait for the image acquisition before color output ...
VkSemaphoreSubmitInfo wait_info = {
sType: VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
semaphore: acquire_sem,
stageMask: VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
value: 1,
};
// ... and signal the present-wait semaphore once all graphics work is done.
VkSemaphoreSubmitInfo signal_info = {
sType: VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
semaphore: submit_sem,
stageMask: VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT,
value: 1,
};
VkSubmitInfo2 submit_info = {
sType: VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
waitSemaphoreInfoCount: 1,
pWaitSemaphoreInfos: &wait_info,
signalSemaphoreInfoCount: 1,
pSignalSemaphoreInfos: &signal_info,
commandBufferInfoCount: 1,
pCommandBufferInfos: &cmd_info,
};
result = vkQueueSubmit2(vk.queues.gfx_queue, 1, &submit_info, vk.render_fences[vk.frame_index]);
VkCheckA("FinishFrame failure: vkQueueSubmit2 error", result);
VkPresentInfoKHR present_info = {
sType: VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
swapchainCount: 1,
pSwapchains: &vk.swapchain,
waitSemaphoreCount: 1,
pWaitSemaphores: &submit_sem,
pImageIndices: &vk.image_index,
};
result = vkQueuePresentKHR(vk.queues.gfx_queue, &present_info);
if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR)
{
// Surface changed under us; rebuild and let the next frame retry.
RecreateSwapchain(vk);
}
else
{
VkCheckA("FinishFrame failure: vkQueuePresentKHR failure", result);
}
}
// Non-indexed draw on the current frame's command buffer.
// NOTE(review): vkCmdDraw's first argument is a vertex count; the
// `index_count` parameter name is misleading for non-indexed draws —
// confirm callers pass vertex counts here.
void
Draw(Vulkan* vk, u32 index_count, u32 instance_count)
{
vkCmdDraw(vk.cmds[vk.frame_index], index_count, instance_count, 0, 0);
}
// Indexed draw on the current frame's command buffer, starting at
// index_offset elements into the bound index buffer.
void
DrawIndexed(Vulkan* vk, u32 index_count, u32 instance_count, u32 index_offset)
{
vkCmdDrawIndexed(vk.cmds[vk.frame_index], index_count, instance_count, index_offset, 0, 0);
}
// Records fn() into the immediate command buffer and submits it on the
// transfer queue, signaling imm_fence. Waits for the previous immediate
// submission before reusing the buffer. Returns false on the first Vulkan
// error; the submission is skipped in that case.
bool
ImmSubmit(Vulkan* vk, void delegate() fn)
{
    VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999);
    bool success = VkCheck("ImmSubmit failure: vkWaitForFences error", result);
    if (success)
    {
        result = vkResetFences(vk.device, 1, &vk.imm_fence);
        success = VkCheck("ImmSubmit failure: vkResetFences error", result);
    }
    if (success)
    {
        result = vkResetCommandBuffer(vk.imm_cmd, 0);
        success = VkCheck("ImmSubmit failure: vkResetCommandBuffer error", result);
    }
    if (success)
    {
        VkCommandBufferBeginInfo cmd_info = {
            sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
            flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        };
        result = vkBeginCommandBuffer(vk.imm_cmd, &cmd_info);
        // Cleanup: removed the dead `imm_started` local, which was assigned
        // here but never read anywhere.
        success = VkCheck("ImmSubmit failure: vkBeginCommandBuffer error", result);
    }
    if (success)
    {
        fn();
        result = vkEndCommandBuffer(vk.imm_cmd);
        success = VkCheck("ImmSubmit failure: vkEndCommandBuffer error", result);
    }
    if (success)
    {
        VkCommandBufferSubmitInfo cmd_info = {
            sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
            commandBuffer: vk.imm_cmd,
        };
        VkSubmitInfo2 submit_info = {
            sType: VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
            commandBufferInfoCount: 1,
            pCommandBufferInfos: &cmd_info,
        };
        result = vkQueueSubmit2(vk.tfer_queue, 1, &submit_info, vk.imm_fence);
        success = VkCheck("ImmSubmit failure: vkQueueSubmit2 error", result);
    }
    return success;
}
// Stub: asset upload is not implemented yet; always reports success.
bool
TransferAssets(Vulkan* vk)
{
return true;
}
// Creates a w*h image view and uploads `ch`-channel pixel `data` into it.
// 4-channel data is uploaded directly; 1/2/3-channel data is staged into a
// storage buffer, expanded to RGBA by a compute dispatch, then blitted into
// the destination image. Blocks until the conversion completes.
pragma(inline): void
CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, u32 ch, u8[] data)
{
CreateImageView(vk, view, w, h);
if (ch == 4)
{
assert(Transfer(vk, view, data, w, h), "CreateImageView failure: Image Transfer error");
}
else
{
// Raw channel bytes go into a storage buffer the conversion shader reads.
Buffer buf;
CreateBuffer(vk, &buf, BT.Storage, w * h * ch, false);
assert(Transfer(vk, &buf, data), "CreateImageView failure: Buffer Transfer error");
// Intermediate RGBA target written by the conversion shader.
ImageView conv_view;
CreateImageView(vk, &conv_view, w, h, FMT.RGBA_F32);
WriteConvDescriptor(vk, &buf);
WriteConvDescriptor(vk, &conv_view);
BeginComputePass(vk);
vkCmdBindDescriptorSets(
vk.comp_cmd,
VK_PIPELINE_BIND_POINT_COMPUTE,
vk.conv_pipeline_layout,
0,
1,
&vk.conv_desc_set,
0,
null
);
// Select the pipeline specialized for the source channel count.
VkPipeline pipeline = ch == 1 ? vk.r_to_rgba_pipeline.handle :
ch == 2 ? vk.rg_to_rgba_pipeline.handle :
vk.rgb_to_rgba_pipeline.handle;
vkCmdBindPipeline(vk.comp_cmd, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);
// The shader receives the image dimensions via push constants.
ConvPushConst pc = {
x: w,
y: h,
};
vkCmdPushConstants(
vk.comp_cmd,
vk.conv_pipeline_layout,
VK_SHADER_STAGE_COMPUTE_BIT,
0,
ConvPushConst.sizeof,
&pc
);
Transition(vk.comp_cmd, &conv_view, VK_IMAGE_LAYOUT_GENERAL);
Dispatch(vk, vk.comp_cmd);
// Blit the converted RGBA image into the sampled destination, then leave
// it ready for shader reads.
Transition(vk.comp_cmd, view, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
VkExtent2D extent = { width: w, height: h };
Copy(vk.comp_cmd, &conv_view.base, &view.base, extent, extent);
Transition(vk.comp_cmd, view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
FinishComputePass(vk);
// Block until the conversion finishes so the temporaries can be freed.
// NOTE(review): FinishComputePass submits on the graphics queue, yet this
// also drains the transfer queue — confirm both waits are required.
vkWaitForFences(vk.device, 1, &vk.comp_fence, VK_TRUE, u64.max);
vkQueueWaitIdle(vk.tfer_queue);
Destroy(vk, &buf);
Destroy(&conv_view, vk.device, vk.vma);
}
}
// Waits for the previous compute pass (comp_fence), then resets and begins
// the dedicated compute command buffer for one-time-submit recording.
pragma(inline): void
BeginComputePass(Vulkan* vk)
{
    VkResult result = vkWaitForFences(vk.device, 1, &vk.comp_fence, VK_TRUE, 1000000000);
    VkCheckA("BeginComputePass failure: vkWaitForFences error", result);
    result = vkResetFences(vk.device, 1, &vk.comp_fence);
    // Fix: message previously read "BeginComputepass", inconsistent with
    // every other message in this function.
    VkCheckA("BeginComputePass failure: vkResetFences error", result);
    result = vkResetCommandBuffer(vk.comp_cmd, 0);
    VkCheckA("BeginComputePass failure: vkResetCommandBuffer error", result);
    VkCommandBufferBeginInfo cmd_info = {
        sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
    };
    result = vkBeginCommandBuffer(vk.comp_cmd, &cmd_info);
    VkCheckA("BeginComputePass failure: vkBeginCommandBuffer error", result);
}
// Ends the compute command buffer and submits it on the graphics queue,
// signaling comp_fence when the work completes.
pragma(inline): void
FinishComputePass(Vulkan* vk)
{
VkResult result = vkEndCommandBuffer(vk.comp_cmd);
VkCheckA("FinishComputePass failure: vkEndCommandBuffer error", result);
VkCommandBufferSubmitInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
commandBuffer: vk.comp_cmd,
};
VkSubmitInfo2 submit_info = {
sType: VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
commandBufferInfoCount: 1,
pCommandBufferInfos: &cmd_info,
};
result = vkQueueSubmit2(vk.gfx_queue, 1, &submit_info, vk.comp_fence);
VkCheckA("FinishComputePass failure: vkQueueSubmit2 error", result);
}
// Creates a 2D GPU-only image of w*h with a color view over it.
// F32 images are compute conversion targets (storage + transfer-src);
// anything else is a sampled texture filled by transfer.
// Aborts via assert on allocation/view-creation failure.
pragma(inline): void
CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format = FMT.RGBA_UNORM)
{
    // Fix: stack storage for the queue-family indices instead of a GC array
    // literal cast to a pointer (matches CreateBuffer).
    u32[2] queue_indices = [cast(u32)vk.gfx_index, cast(u32)vk.tfer_index];
    VmaAllocationCreateInfo alloc_info = {
        usage: VMA_MEMORY_USAGE_GPU_ONLY,
        requiredFlags: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
    };
    VkImageCreateInfo image_info = {
        sType: VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        imageType: VK_IMAGE_TYPE_2D,
        mipLevels: 1,
        arrayLayers: 1,
        format: format,
        tiling: VK_IMAGE_TILING_OPTIMAL,
        initialLayout: VK_IMAGE_LAYOUT_UNDEFINED,
        usage: format == FMT.RGBA_F32 ? (VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT) : (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
        samples: VK_SAMPLE_COUNT_1_BIT,
        extent: {
            width: w,
            height: h,
            depth: 1,
        },
    };
    if (vk.gfx_index != vk.tfer_index)
    {
        // Distinct graphics/transfer families: share the image between them.
        image_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
        image_info.queueFamilyIndexCount = cast(u32)queue_indices.length;
        image_info.pQueueFamilyIndices = queue_indices.ptr;
    }
    view.layout = VK_IMAGE_LAYOUT_UNDEFINED;
    // Bug fix: record the format the image is actually created with; this
    // was hard-coded to VK_FORMAT_R8G8B8A8_SRGB regardless of `format`,
    // so later code reading view.format saw the wrong value.
    view.format = cast(VkFormat)format;
    VkResult result = vmaCreateImage(vk.vma, &image_info, &alloc_info, &view.image, &view.alloc, null);
    // TODO: handle errors and realloc
    assert(VkCheck("CreateImageView failure: vmaCreateImage error", result), "CreateImageView failure");
    VkImageViewCreateInfo view_info = {
        sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        image: view.image,
        viewType: VK_IMAGE_VIEW_TYPE_2D,
        format: format,
        subresourceRange: {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            levelCount: 1,
            layerCount: 1,
        },
    };
    result = vkCreateImageView(vk.device, &view_info, null, &view.view);
    // TODO: also handle here
    assert(VkCheck("CreateImageView failure: vkCreateImageView error", result), "CreateImageView failure");
}
// Takes the most recently freed descriptor slot for `type` off the free
// stack. Asserts when no slot is available.
u32
Pop(Vulkan* vk, DescType type)
{
    DescBindings* b = &vk.desc_bindings[type];
    assert(b.count > 0, "Pop failure: no free bindings remaining");
    b.count -= 1;
    return b.free[b.count];
}
// Returns descriptor slot `index` to the free stack for `type`.
void
Push(Vulkan* vk, u32 index, DescType type)
{
    DescBindings* bindings = vk.desc_bindings.ptr + type;
    // Consistency fix: the named Push overload guards against overflowing
    // the free stack; this overload previously wrote past `free` unchecked.
    assert(bindings.count < bindings.free.length, "Push failure: attempt to push a binding into a full stack");
    bindings.free[bindings.count] = index;
    bindings.count += 1;
}
// Resolves `name` to a descriptor slot: reuses the binding already recorded
// in the lookup table, otherwise pops a fresh slot off the free stack.
u32
Pop(Vulkan* vk, string name, DescType type)
{
    DescBindings* b = &vk.desc_bindings[type];
    auto found = b.lookup_table[name];
    if (found.ok)
    {
        return found.value;
    }
    // TODO: handle unbinding assets (maybe)
    assert(b.count > 0, "Pop failure: no free bindings remaining");
    b.count -= 1;
    return b.free[b.count];
}
// Releases the binding registered under `name` (if any) back onto the free
// stack for `type`. A name with no registered binding is a no-op.
void
Push(Vulkan* vk, string name, DescType type)
{
    DescBindings* b = &vk.desc_bindings[type];
    auto removed = Delete(&b.lookup_table, name);
    if (!removed.ok)
    {
        return;
    }
    assert(b.count < b.free.length, "Push failure: attempt to push a binding into a full stack");
    b.free[b.count] = removed.value;
    b.count += 1;
}
// Pushes the shared PushConst block onto the current frame's command buffer,
// visible to the vertex, fragment, and compute stages of the main layout.
void
PushConstants(Vulkan* vk, PushConst* pc)
{
vkCmdPushConstants(
vk.cmds[vk.frame_index],
vk.pipeline_layout,
VK_SHADER_STAGE_VERTEX_BIT|VK_SHADER_STAGE_FRAGMENT_BIT|VK_SHADER_STAGE_COMPUTE_BIT,
0,
PushConst.sizeof,
pc
);
}
// Reinterprets the typed slice as raw bytes and defers to the u8 overload.
bool
Transfer(T)(Vulkan* vk, Buffer* buf, T[] data)
{
    auto bytes = (cast(u8*)data.ptr)[0 .. data.length * T.sizeof];
    return Transfer(vk, buf, bytes);
}
// Uploads `data` into device-local buffer `buf` by staging it through
// vk.transfer_buf in chunks, submitting one immediate copy per chunk.
// Returns false on the first failure.
bool
Transfer(Vulkan* vk, Buffer* buf, u8[] data)
{
    // The staging buffer must be idle before the first write into it.
    bool success = TransferReady(vk);
    u64 copied = 0;
    while (copied != data.length && success)
    {
        if (copied != 0)
        {
            // Wait for the previous chunk's submission before reusing staging.
            success = TransferReady(vk);
            if (!success)
            {
                break;
            }
        }
        u64 transfer_length = cast(u64)vk.transfer_buf.data.length;
        u64 remaining = cast(u64)data.length - copied;
        u64 copy_length = transfer_length > remaining ? remaining : transfer_length;
        // Bug fix: the source slice used to be data[copied .. copy_length],
        // which is wrong (and a range violation) on every chunk after the
        // first; it must start at `copied` and span `copy_length` bytes.
        vk.transfer_buf.data[0 .. copy_length] = data[copied .. copied + copy_length];
        auto fn = delegate()
        {
            VkBufferCopy copy = {
                srcOffset: 0,
                dstOffset: copied,
                size: copy_length,
            };
            vkCmdCopyBuffer(vk.imm_cmd, vk.transfer_buf.buffer, buf.buffer, 1, &copy);
        };
        success = ImmSubmit(vk, fn);
        copied += copy_length;
    }
    return success;
}
// Uploads a single struct through the staging buffer into `buf`.
bool
Transfer(T)(Vulkan* vk, Buffer* buf, T* ptr)
{
    // Off-by-one fix: a struct that exactly fills the staging buffer is fine
    // (was `<`, rejecting the boundary case).
    assert(T.sizeof <= vk.transfer_buf.data.length, "Transfer failure: structure size is too large");
    bool success = TransferReady(vk);
    if (success)
    {
        memcpy(vk.transfer_buf.data.ptr, ptr, T.sizeof);
        auto fn = delegate()
        {
            VkBufferCopy copy = {
                srcOffset: 0,
                dstOffset: 0,
                size: T.sizeof,
            };
            vkCmdCopyBuffer(vk.imm_cmd, vk.transfer_buf.buffer, buf.buffer, 1, &copy);
        };
        success = ImmSubmit(vk, fn);
    }
    return success;
}
// Needs to be done before writing to the transfer buffer as otherwise it'll overwrite it while previous transfers are occurring
// Blocks (up to ~1s) until the last immediate submission's fence signals;
// returns false (after logging) on wait failure or timeout.
bool
TransferReady(Vulkan* vk)
{
VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999);
return VkCheck("Transfer failure: vkWaitForFences error", result);
}
// Convenience overload: uploads into the Image underlying `view`.
pragma(inline): bool
Transfer(Vulkan* vk, ImageView* view, u8[] data, u32 w, u32 h)
{
return Transfer(vk, &view.base, data, w, h);
}
// Uploads raw pixel data into `image` through the staging buffer, leaving
// the image in SHADER_READ_ONLY_OPTIMAL.
// NOTE(review): every chunk still copies the full w*h extent from staging
// offset 0, so only uploads that fit in one chunk place data correctly —
// TODO chunk by whole rows for images larger than the staging buffer.
bool
Transfer(Vulkan* vk, Image* image, u8[] data, u32 w, u32 h)
{
    // Consistency fix (matches the buffer overload and the TransferReady
    // comment above): wait for any in-flight immediate submission before
    // overwriting the staging buffer; previously the first write raced it.
    bool success = TransferReady(vk);
    u64 copied = 0;
    while (copied != data.length && success)
    {
        if (copied != 0)
        {
            success = TransferReady(vk);
            if (!success)
            {
                break;
            }
        }
        u64 transfer_length = cast(u64)vk.transfer_buf.data.length;
        u64 remaining = cast(u64)data.length - copied;
        u64 copy_length = transfer_length > remaining ? remaining : transfer_length;
        // Bug fix: the source slice was data[copied .. copy_length] — out of
        // bounds on every chunk after the first — and bufferOffset was
        // `copied` even though each chunk is written at staging offset 0.
        vk.transfer_buf.data[0 .. copy_length] = data[copied .. copied + copy_length];
        auto fn = delegate()
        {
            VkBufferImageCopy copy = {
                bufferOffset: 0,
                bufferRowLength: w,
                bufferImageHeight: h,
                imageSubresource: {
                    aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
                    layerCount: 1,
                },
                imageExtent: {
                    width: w,
                    height: h,
                    depth: 1,
                },
            };
            // TRANSFER_DST for the copy, then ready for sampling.
            Transition(vk.imm_cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
            vkCmdCopyBufferToImage(vk.imm_cmd, vk.transfer_buf.buffer, image.image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);
            Transition(vk.imm_cmd, image, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
        };
        success = ImmSubmit(vk, fn);
        copied += copy_length;
    }
    return success;
}
pragma(inline): void
Copy(VkCommandBuffer cmd, Image* src, Image* dst, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    // Image-to-image blit using the current tracked layouts of both images.
    VkImageLayout from = src.layout;
    VkImageLayout to = dst.layout;
    Copy(cmd, src.image, dst.image, from, to, src_ext, dst_ext);
}
pragma(inline): void
Copy(VkCommandBuffer cmd, Image* src, VkImage dst, VkImageLayout dst_layout, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    // Blit from a tracked Image into a raw VkImage (e.g. a swapchain image)
    // whose layout the caller supplies.
    VkImageLayout from = src.layout;
    Copy(cmd, src.image, dst, from, dst_layout, src_ext, dst_ext);
}
pragma(inline): void
// Records a full-subresource, linearly-filtered blit from `src` to `dst`,
// scaling from src_ext to dst_ext. Both images must already be in the
// layouts passed in; no transitions are performed here.
Copy(VkCommandBuffer cmd, VkImage src, VkImage dst, VkImageLayout src_layout, VkImageLayout dst_layout, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    // Offsets[0]..offsets[1] define the source/destination boxes; z spans 0..1
    // for a 2D image. Only mip 0 / layer 0 of the colour aspect is blitted.
    VkImageBlit2 blit = {
        sType: VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
        srcOffsets: [
            { x: 0, y: 0 },
            { x: cast(i32)src_ext.width, y: cast(i32)src_ext.height, z: 1 },
        ],
        dstOffsets: [
            { x: 0, y: 0 },
            { x: cast(i32)dst_ext.width, y: cast(i32)dst_ext.height, z: 1 },
        ],
        srcSubresource: {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            baseArrayLayer: 0,
            layerCount: 1,
            mipLevel: 0,
        },
        dstSubresource: {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            baseArrayLayer: 0,
            layerCount: 1,
            mipLevel: 0,
        },
    };
    // Linear filter: the blit doubles as the scaling pass when the extents differ.
    VkBlitImageInfo2 blit_info = {
        sType: VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
        srcImage: src,
        srcImageLayout: src_layout,
        dstImage: dst,
        dstImageLayout: dst_layout,
        filter: VK_FILTER_LINEAR,
        regionCount: 1,
        pRegions: &blit,
    };
    vkCmdBlitImage2(cmd, &blit_info);
}
void
Bind(Vulkan* vk, PipelineHandle* pipeline)
{
    // Binds `pipeline` (skipping redundant rebinds for this frame), attaches
    // every descriptor set under the shared pipeline layout, and resets the
    // dynamic viewport/scissor to cover the full swapchain extent.
    auto frame = vk.frame_index;
    VkCommandBuffer cmd = vk.cmds[frame];
    bool rebind = vk.last_pipeline[frame] == null || vk.last_pipeline[frame] != pipeline.handle;
    if (rebind)
    {
        vkCmdBindPipeline(cmd, pipeline.type, pipeline.handle);
        vk.last_pipeline[frame] = pipeline.handle;
    }
    vkCmdBindDescriptorSets(
        cmd,
        pipeline.type,
        vk.pipeline_layout,
        0,
        cast(u32)vk.desc_sets.length,
        vk.desc_sets.ptr,
        0,
        null
    );
    VkViewport viewport = {
        x: 0.0,
        y: 0.0,
        width: cast(f32)vk.swapchain_extent.width,
        height: cast(f32)vk.swapchain_extent.height,
        minDepth: 0.0,
        maxDepth: 1.0,
    };
    vkCmdSetViewport(cmd, 0, 1, &viewport);
    VkRect2D scissor = {
        extent: {
            width: vk.swapchain_extent.width,
            height: vk.swapchain_extent.height,
        },
    };
    vkCmdSetScissor(cmd, 0, 1, &scissor);
}
pragma(inline): void
// Records a layout transition for the whole image. Uses ALL_COMMANDS +
// MEMORY_WRITE/READ masks on both sides — a deliberately heavy "stop the
// world" barrier that is always correct at the cost of pipelining.
Transition(VkCommandBuffer cmd, VkImage image, VkImageLayout current_layout, VkImageLayout new_layout)
{
    VkImageMemoryBarrier2 barrier = {
        sType: VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
        srcStageMask: VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
        srcAccessMask: VK_ACCESS_2_MEMORY_WRITE_BIT,
        dstStageMask: VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
        dstAccessMask: VK_ACCESS_2_MEMORY_WRITE_BIT | VK_ACCESS_2_MEMORY_READ_BIT,
        oldLayout: current_layout,
        newLayout: new_layout,
        image: image,
        subresourceRange: {
            // Depth aspect only when moving into the depth-attachment layout;
            // every other destination is treated as a colour image.
            aspectMask: new_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT,
            baseMipLevel: 0,
            levelCount: VK_REMAINING_MIP_LEVELS,
            baseArrayLayer: 0,
            layerCount: VK_REMAINING_ARRAY_LAYERS,
        },
    };
    VkDependencyInfo dep_info = {
        sType: VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
        imageMemoryBarrierCount: 1,
        pImageMemoryBarriers: &barrier,
    };
    vkCmdPipelineBarrier2(cmd, &dep_info);
}
pragma(inline): void
Transition(VkCommandBuffer cmd, ImageView* view, VkImageLayout new_layout)
{
    // Transition the view's image and keep the tracked layout in sync.
    VkImageLayout old_layout = view.layout;
    view.layout = new_layout;
    Transition(cmd, view.image, old_layout, new_layout);
}
pragma(inline): void
Transition(VkCommandBuffer cmd, Image* image, VkImageLayout new_layout)
{
    // Transition the tracked Image and record its new layout.
    VkImageLayout old_layout = image.layout;
    image.layout = new_layout;
    Transition(cmd, image.image, old_layout, new_layout);
}
Result!(Shader)
BuildShader(Vulkan* vk, u8[] bytes)
{
    // Wraps SPIR-V bytes in a VkShaderModule. codeSize is in bytes; pCode
    // must point at 4-byte aligned SPIR-V words (assumed to hold for the
    // asset loader's allocations — TODO confirm).
    Result!(Shader) shader;
    VkShaderModuleCreateInfo info = {
        sType: VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
        codeSize: bytes.length,
        pCode: cast(uint*)bytes.ptr,
    };
    VkResult create = vkCreateShaderModule(vk.device, &info, null, &shader.value);
    shader.ok = VkCheck("vkCreateShaderModule failure", create);
    return shader;
}
Pipeline
// Builds a graphics pipeline for dynamic rendering (no render pass) from the
// declarative GfxPipelineInfo: loads + compiles both shader stages, wires up
// fixed-function state, and returns the handle. Aborts (assert) on any
// failure; shader modules are destroyed on scope exit once the pipeline
// exists. Viewport and scissor are dynamic state (set per-bind in Bind()).
CreateGraphicsPipeline(Vulkan* vk, GfxPipelineInfo* build_info)
{
    VkDynamicState[] dyn_state = [ VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR ];
    VkPipelineDynamicStateCreateInfo dyn_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
        pDynamicStates: dyn_state.ptr,
        dynamicStateCount: cast(u32)dyn_state.length,
    };
    VkPipelineInputAssemblyStateCreateInfo assembly_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
        topology: VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
        primitiveRestartEnable: VK_FALSE,
    };
    VkPipelineRasterizationStateCreateInfo rasterization_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
        cullMode: VK_CULL_MODE_BACK_BIT,
        polygonMode: VK_POLYGON_MODE_FILL,
        lineWidth: 1.0,
        frontFace: VK_FRONT_FACE_COUNTER_CLOCKWISE,
    };
    VkPipelineMultisampleStateCreateInfo multisample_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
        rasterizationSamples: VK_SAMPLE_COUNT_1_BIT,
        minSampleShading: 1.0,
        alphaToCoverageEnable: VK_FALSE,
        alphaToOneEnable: VK_FALSE,
    };
    // GREATER_OR_EQUAL compare: depth test uses a reversed-depth convention
    // here — confirm against the projection setup in the math module.
    VkPipelineDepthStencilStateCreateInfo depth_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
        depthTestEnable: VK_TRUE,
        depthWriteEnable: VK_TRUE,
        depthCompareOp: VK_COMPARE_OP_GREATER_OR_EQUAL,
        depthBoundsTestEnable: VK_FALSE,
        stencilTestEnable: VK_FALSE,
        minDepthBounds: 0.0,
        maxDepthBounds: 1.0,
    };
    // Dynamic rendering: declare the attachment formats here instead of a
    // VkRenderPass; chained into create_info via pNext below.
    VkPipelineRenderingCreateInfo rendering_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
        colorAttachmentCount: 1,
        pColorAttachmentFormats: &vk.draw_image.format,
        depthAttachmentFormat: vk.depth_image.format,
    };
    // Standard premultiplied-style alpha blend: src*a + dst*(1-a).
    VkPipelineColorBlendAttachmentState blend_state = {
        colorWriteMask: VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
        blendEnable: VK_TRUE,
        srcColorBlendFactor: VK_BLEND_FACTOR_SRC_ALPHA,
        dstColorBlendFactor: VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        colorBlendOp: VK_BLEND_OP_ADD,
        srcAlphaBlendFactor: VK_BLEND_FACTOR_SRC_ALPHA,
        dstAlphaBlendFactor: VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        alphaBlendOp: VK_BLEND_OP_ADD,
    };
    VkPipelineColorBlendStateCreateInfo blend_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
        logicOpEnable: VK_FALSE,
        logicOp: VK_LOGIC_OP_COPY,
        attachmentCount: 1,
        pAttachments: &blend_state,
    };
    // Single vertex buffer binding; rate/stride/attributes come from the caller.
    VkVertexInputBindingDescription vertex_input_desc = {
        binding: 0,
        inputRate: cast(VkVertexInputRate)build_info.input_rate,
        stride: build_info.input_rate_stride,
    };
    VkPipelineVertexInputStateCreateInfo input_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
        vertexBindingDescriptionCount: 1,
        pVertexBindingDescriptions: &vertex_input_desc,
        vertexAttributeDescriptionCount: cast(u32)build_info.vertex_attributes.length,
        pVertexAttributeDescriptions: build_info.vertex_attributes.ptr,
    };
    // Counts only; actual viewport/scissor are dynamic.
    VkPipelineViewportStateCreateInfo viewport_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
        viewportCount: 1,
        scissorCount: 1,
    };
    VkPipelineShaderStageCreateInfo[] shader_info = [
        {
            sType: VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            stage: VK_SHADER_STAGE_VERTEX_BIT,
            pName: "main",
        },
        {
            sType: VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            stage: VK_SHADER_STAGE_FRAGMENT_BIT,
            pName: "main",
        },
    ];
    // Shader bytes are scratch data; frame arena 0 is reused as a scratch arena.
    Arena* arena = &vk.frame_arenas[0];
    u8[] vert_bytes = LoadAssetData(arena, build_info.vertex_shader);
    u8[] frag_bytes = LoadAssetData(arena, build_info.frag_shader);
    assert(vert_bytes && frag_bytes, "Unable to load shaders");
    Result!(Shader) vert_module = BuildShader(vk, vert_bytes);
    Result!(Shader) frag_module = BuildShader(vk, frag_bytes);
    assert(vert_module.ok && frag_module.ok, "Unable to build vulkan shaders");
    // Modules are safe to destroy once the pipeline has been created.
    scope(exit)
    {
        Destroy(vk, vert_module.value);
        Destroy(vk, frag_module.value);
    }
    // `module` is a D keyword, so the field has to be assigned via __traits.
    __traits(getMember, shader_info.ptr + 0, "module") = vert_module.value;
    __traits(getMember, shader_info.ptr + 1, "module") = frag_module.value;
    // Optional specialization constants per stage; the info structs live on
    // the stack and are only referenced until vkCreateGraphicsPipelines returns.
    VkSpecializationInfo vert_spec_info = {
        dataSize: build_info.vert_spec.size,
        mapEntryCount: cast(u32)build_info.vert_spec.entries.length,
        pMapEntries: build_info.vert_spec.entries.ptr,
        pData: build_info.vert_spec.data,
    };
    if (build_info.vert_spec.entries.length > 0)
    {
        shader_info[0].pSpecializationInfo = &vert_spec_info;
    }
    VkSpecializationInfo frag_spec_info = {
        dataSize: build_info.frag_spec.size,
        mapEntryCount: cast(u32)build_info.frag_spec.entries.length,
        pMapEntries: build_info.frag_spec.entries.ptr,
        pData: build_info.frag_spec.data,
    };
    if (build_info.frag_spec.entries.length > 0)
    {
        shader_info[1].pSpecializationInfo = &frag_spec_info;
    }
    VkGraphicsPipelineCreateInfo create_info = {
        sType: VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
        pNext: &rendering_info,
        pVertexInputState: &input_info,
        pInputAssemblyState: &assembly_info,
        pViewportState: &viewport_info,
        pRasterizationState: &rasterization_info,
        pMultisampleState: &multisample_info,
        pColorBlendState: &blend_info,
        pDepthStencilState: &depth_info,
        pDynamicState: &dyn_info,
        stageCount: cast(u32)shader_info.length,
        pStages: shader_info.ptr,
        layout: vk.pipeline_layout,
    };
    PipelineHandle pipeline = { type: VK_PIPELINE_BIND_POINT_GRAPHICS };
    VkResult result = vkCreateGraphicsPipelines(vk.device, null, 1, &create_info, null, &pipeline.handle);
    assert(VkCheck("CreateGraphicsPipeline failure", result), "Unable to build pipeline");
    return pipeline;
}
Pipeline
// Builds a compute pipeline from CompPipelineInfo: loads + compiles the
// shader, applies optional specialization constants, and returns the handle.
// Uses the caller-supplied layout when present, otherwise the shared
// bindless pipeline layout. Aborts (assert) on failure.
CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info)
{
    VkComputePipelineCreateInfo info = {
        sType: VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        layout: comp_info.layout ? *comp_info.layout : vk.pipeline_layout,
        stage: {
            sType: VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            stage: VK_SHADER_STAGE_COMPUTE_BIT,
            pName: "main",
        },
    };
    // Frame arena 0 doubles as scratch space for the shader bytes.
    u8[] comp_bytes = LoadAssetData(&vk.frame_arenas[0], comp_info.shader);
    assert(comp_bytes != null, "Unable to load compute shader data");
    Result!(Shader) comp_module = BuildShader(vk, comp_bytes);
    assert(comp_module.ok, "Unable to build compute shader");
    // Module may be destroyed once the pipeline has been created.
    scope(exit) Destroy(vk, comp_module.value);
    // `module` is a D keyword, so the field has to be assigned via __traits.
    __traits(getMember, &info.stage, "module") = comp_module.value;
    // spec_info lives on the stack and is only referenced until
    // vkCreateComputePipelines returns below.
    VkSpecializationInfo spec_info = {
        dataSize: comp_info.spec.size,
        mapEntryCount: cast(u32)comp_info.spec.entries.length,
        pMapEntries: comp_info.spec.entries.ptr,
        pData: comp_info.spec.data,
    };
    if (comp_info.spec.entries.length > 0)
    {
        info.stage.pSpecializationInfo = &spec_info;
    }
    PipelineHandle pipeline = { type: VK_PIPELINE_BIND_POINT_COMPUTE };
    VkResult result = vkCreateComputePipelines(vk.device, null, 1, &info, null, &pipeline.handle);
    assert(VkCheck("CreateComputePipeline failure", result), "Unable to build pipeline");
    return pipeline;
}
void
SetUniform(Vulkan* vk, GlobalUniforms* globals)
{
    // Copies the frame's global uniforms into the mapped global buffer, then
    // refreshes binding 0 of the shared descriptor set to point at it.
    vk.global_buf.data[0] = *globals;
    VkDescriptorBufferInfo info = {
        buffer: vk.global_buf.buffer,
        range: GlobalUniforms.sizeof,
    };
    VkWriteDescriptorSet desc_write = {
        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        dstSet: vk.desc_sets[DT.Shared],
        dstBinding: 0,
        descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        descriptorCount: 1,
        pBufferInfo: &info,
    };
    vkUpdateDescriptorSets(vk.device, 1, &desc_write, 0, null);
}
void
// Blocks until the device has finished all queued work.
WaitIdle(Vulkan* vk)
{
    vkDeviceWaitIdle(vk.device);
}
void
// Destroys a shader module (safe after pipeline creation completes).
Destroy(Vulkan* vk, Shader shader)
{
    vkDestroyShaderModule(vk.device, shader, null);
}
void
// Destroys a pipeline handle; callers must ensure the device is idle or the
// pipeline is no longer referenced by in-flight command buffers.
Destroy(Vulkan* vk, Pipeline* pipeline)
{
    vkDestroyPipeline(vk.device, pipeline.handle, null);
}
void
// Full renderer teardown: waits for the device to go idle, then walks the
// cleanup list and destroys each subsystem that was registered via Push()
// during init. List order determines destruction order — presumably Push()
// prepends so teardown runs in reverse init order (TODO confirm in util/alloc).
Destroy(Vulkan* vk)
{
    vkDeviceWaitIdle(vk.device);
    alias N = Node!(SI);
    assert(vk.cleanup_list.first != null, "node null");
    for(N* node = vk.cleanup_list.first; node != null; node = node.next)
    {
        switch (node.value)
        {
            case SI.Renderer:
                DestroyRenderer(vk);
                break;
            case SI.Instance:
                Destroy(vk.instance);
                break;
            case SI.Debug:
                Destroy(vk.dbg_msg, vk.instance);
                break;
            case SI.Surface:
                Destroy(vk.surface, vk.instance);
                break;
            case SI.Device:
                Destroy(vk.device);
                break;
            case SI.Vma:
                Destroy(vk.vma);
                break;
            case SI.FrameStructures:
                DestroyFS(vk);
                break;
            case SI.Swapchain:
                Destroy(vk.swapchain, vk.present_images, vk.device);
                break;
            case SI.DrawImages:
                Destroy(&vk.draw_image, &vk.depth_image, vk.device, vk.vma);
                break;
            case SI.Descriptors:
                Destroy(vk.desc_pool, vk.desc_layouts, vk.pipeline_layout, vk.nearest_sampler, vk.device);
                break;
            case SI.Buffers:
                Destroy(vk, &vk.transfer_buf);
                Destroy(vk, &vk.ui_vert_buf);
                Destroy(vk, &vk.ui_index_buf);
                Destroy(vk, &vk.global_buf);
                Destroy(vk, &vk.shader_buf);
                break;
            case SI.Pipelines:
                DestroyPipelines(vk);
                break;
            // Unrecognized entries are ignored rather than crashing teardown.
            default:
                break;
        }
    }
}
void
DestroyPipelines(Vulkan* vk)
{
    // Tears down the format-conversion compute objects. Each handle is only
    // destroyed when it was actually created (null handles are skipped).
    if (vk.conv_pipeline_layout)
        vkDestroyPipelineLayout(vk.device, vk.conv_pipeline_layout, null);
    if (vk.conv_desc_layout)
        vkDestroyDescriptorSetLayout(vk.device, vk.conv_desc_layout, null);
    if (vk.r_to_rgba_pipeline.handle)
        vkDestroyPipeline(vk.device, vk.r_to_rgba_pipeline.handle, null);
    if (vk.rg_to_rgba_pipeline.handle)
        vkDestroyPipeline(vk.device, vk.rg_to_rgba_pipeline.handle, null);
    if (vk.rgb_to_rgba_pipeline.handle)
        vkDestroyPipeline(vk.device, vk.rgb_to_rgba_pipeline.handle, null);
}
void
// Destroys a host-mapped buffer: the memory must be unmapped before the
// underlying buffer/allocation are released.
Destroy(T)(Vulkan* vk, MappedBuffer!(T)* buf)
{
    vmaUnmapMemory(vk.vma, buf.alloc);
    Destroy(vk, &buf.base);
}
void
// Releases a VMA-backed buffer and its allocation in one call.
Destroy(Vulkan* vk, Buffer* buf)
{
    vmaDestroyBuffer(vk.vma, buf.buffer, buf.alloc);
}
bool
// Creates the descriptor pool, the shared + bindless set layouts, allocates
// one set per DT slot, builds the shared pipeline layout with a push-constant
// range, seeds per-set free-lists for bindless slot allocation, and creates
// the default nearest-filter sampler (written into shared binding 3).
// Returns false at the first Vulkan failure; later steps are skipped.
InitDescriptors(Vulkan* vk)
{
    Push(vk, SI.Descriptors);
    bool success = true;
    // Generous per-type capacities; maxSets bounds total sets, not descriptors.
    VkDescriptorPoolSize[] pool_sizes = [
        { type: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 4096 },
    ];
    VkDescriptorPoolCreateInfo pool_info = {
        sType: VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
        poolSizeCount: cast(u32)pool_sizes.length,
        pPoolSizes: pool_sizes.ptr,
        maxSets: 12,
        flags: VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
    };
    VkResult result = vkCreateDescriptorPool(vk.device, &pool_info, null, &vk.desc_pool);
    success = VkCheck("vkCreateDescriptorPool failure", result);
    // Shared set (DT.Shared): global uniforms, second uniform slot, the draw
    // image as a storage image, and the default sampler. All update-after-bind.
    if (success)
    {
        VkDescriptorBindingFlags[4] shared_binding_flags;
        shared_binding_flags[] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT;
        VkDescriptorSetLayoutBindingFlagsCreateInfo shared_flag_info = {
            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
            bindingCount: cast(u32)shared_binding_flags.length,
            pBindingFlags: shared_binding_flags.ptr,
        };
        VkDescriptorSetLayoutBinding[] shared_bindings = [
            { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
            { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
            { binding: 2, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
            { binding: 3, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
        ];
        VkDescriptorSetLayoutCreateInfo shared_set_info = {
            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
            pNext: &shared_flag_info,
            bindingCount: cast(u32)shared_bindings.length,
            pBindings: shared_bindings.ptr,
            flags: VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
        };
        result = vkCreateDescriptorSetLayout(vk.device, &shared_set_info, null, &vk.desc_layouts[DT.Shared]);
        success = VkCheck("vkCreateDescriptorSetLayout failure", result);
    }
    // Bindless sets: one layout per DT slot after Shared, each a single
    // binding with a 1024-element, partially-bound descriptor array.
    if (success)
    {
        VkDescriptorType[DT.max] type_lookup = [
            DT.SampledImage: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
            DT.Material: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        ];
        VkDescriptorBindingFlags bindless_flag = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
        VkDescriptorSetLayoutBindingFlagsCreateInfo bindless_flag_info = {
            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
            bindingCount: 1,
            pBindingFlags: &bindless_flag,
        };
        VkDescriptorSetLayoutBinding binding = {
            binding: 0,
            descriptorCount: 1024,
            stageFlags: VK_SHADER_STAGE_ALL,
        };
        VkDescriptorSetLayoutCreateInfo bindless_set_info = {
            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
            pNext: &bindless_flag_info,
            bindingCount: 1,
            pBindings: &binding,
            flags: VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
        };
        foreach(i; cast(u64)(DT.Shared+1) .. DT.max)
        {
            // Only the descriptor type differs between the bindless layouts.
            binding.descriptorType = type_lookup[i];
            if (success)
            {
                result = vkCreateDescriptorSetLayout(vk.device, &bindless_set_info, null, vk.desc_layouts.ptr + i);
                success = VkCheck("vkCreateDescriptorSetLayout failure", result);
            }
        }
    }
    // One descriptor set per layout slot, all from the shared pool.
    if (success)
    {
        VkDescriptorSetAllocateInfo alloc_info = {
            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
            descriptorSetCount: cast(u32)DT.max,
            pSetLayouts: vk.desc_layouts.ptr,
            descriptorPool: vk.desc_pool,
        };
        result = vkAllocateDescriptorSets(vk.device, &alloc_info, vk.desc_sets.ptr);
        success = VkCheck("vkAllocateDescriptorSets failure", result);
    }
    // Single pipeline layout shared by graphics and compute, with one
    // push-constant range visible to all three stage types.
    if (success)
    {
        VkPushConstantRange const_range = {
            offset: 0,
            size: cast(VkDeviceSize)PushConst.sizeof,
            stageFlags: VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_VERTEX_BIT,
        };
        VkPipelineLayoutCreateInfo layout_info = {
            sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
            setLayoutCount: cast(u32)DT.max,
            pushConstantRangeCount: 1,
            pPushConstantRanges: &const_range,
            pSetLayouts: vk.desc_layouts.ptr,
        };
        result = vkCreatePipelineLayout(vk.device, &layout_info, null, &vk.pipeline_layout);
        success = VkCheck("vkCreatePipelineLayout failure", result);
    }
    // CPU-side bookkeeping: per-set name->slot table plus a free-list of
    // bindless slots. The list is filled so that free[DESC_MAX-1] = 0 and
    // free[0] = DESC_MAX-1 — presumably popped from the top so slot 0 is
    // handed out last (TODO confirm against the allocation site).
    if (success)
    {
        foreach(i; cast(u64)DT.min .. cast(u64)DT.max)
        {
            vk.desc_bindings[i].lookup_table = CreateHashTable!(string, u32)(8);
            u32 DESC_MAX_BINDINGS = 512;
            vk.desc_bindings[i].free = AllocArray!(u32)(&vk.arena, DESC_MAX_BINDINGS);
            u32 free_count = 0;
            for(i32 j = DESC_MAX_BINDINGS-1; j >= 0; j -= 1)
            {
                vk.desc_bindings[i].free[j] = cast(u32)free_count;
                free_count += 1;
            }
            vk.desc_bindings[i].count = free_count;
        }
    }
    // Default sampler: nearest filtering with max device anisotropy.
    if (success)
    {
        VkPhysicalDeviceProperties props;
        vkGetPhysicalDeviceProperties(vk.physical_device, &props);
        VkSamplerCreateInfo sampler_info = {
            sType: VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            magFilter: VK_FILTER_NEAREST,
            minFilter: VK_FILTER_NEAREST,
            addressModeU: VK_SAMPLER_ADDRESS_MODE_REPEAT,
            addressModeV: VK_SAMPLER_ADDRESS_MODE_REPEAT,
            addressModeW: VK_SAMPLER_ADDRESS_MODE_REPEAT,
            anisotropyEnable: VK_TRUE,
            maxAnisotropy: props.limits.maxSamplerAnisotropy,
            borderColor: VK_BORDER_COLOR_INT_OPAQUE_BLACK,
            compareOp: VK_COMPARE_OP_ALWAYS,
            mipmapMode: VK_SAMPLER_MIPMAP_MODE_LINEAR,
        };
        result = vkCreateSampler(vk.device, &sampler_info, null, &vk.nearest_sampler);
        success = VkCheck("vkCreateSampler failure", result);
    }
    // Publish the sampler (shared binding 3) and the draw image (binding 2).
    if (success)
    {
        VkDescriptorImageInfo sampler_info = {
            sampler: vk.nearest_sampler,
        };
        VkWriteDescriptorSet write = {
            sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            dstSet: vk.desc_sets[DT.Shared],
            dstBinding: 3,
            descriptorCount: 1,
            descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER,
            pImageInfo: &sampler_info,
        };
        vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
        WriteDrawImageDesc(vk);
    }
    return success;
}
void
WriteDrawImageDesc(Vulkan* vk)
{
    // Points shared binding 2 (storage image, GENERAL layout) at the current
    // draw image; called at init and again after swapchain recreation.
    VkDescriptorImageInfo info = {
        imageView: vk.draw_image.view,
        imageLayout: VK_IMAGE_LAYOUT_GENERAL,
    };
    VkWriteDescriptorSet desc_write = {
        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        dstSet: vk.desc_sets[DT.Shared],
        dstBinding: 2,
        descriptorCount: 1,
        descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        pImageInfo: &info,
    };
    vkUpdateDescriptorSets(vk.device, 1, &desc_write, 0, null);
}
void
WriteConvDescriptor(Vulkan* vk, Buffer* buf)
{
    // Binds `buf` as the storage buffer (binding 1) of the format-conversion
    // compute descriptor set.
    VkDescriptorBufferInfo info = {
        buffer: buf.buffer,
        range: buf.size,
    };
    VkWriteDescriptorSet desc_write = {
        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        dstSet: vk.conv_desc_set,
        dstBinding: 1,
        descriptorCount: 1,
        descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
        pBufferInfo: &info,
    };
    vkUpdateDescriptorSets(vk.device, 1, &desc_write, 0, null);
}
void
WriteConvDescriptor(Vulkan* vk, ImageView* view)
{
    // Binds `view` as the storage image (binding 0, GENERAL layout) of the
    // format-conversion compute descriptor set.
    VkDescriptorImageInfo info = {
        imageView: view.view,
        imageLayout: VK_IMAGE_LAYOUT_GENERAL,
    };
    VkWriteDescriptorSet desc_write = {
        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        dstSet: vk.conv_desc_set,
        dstBinding: 0,
        descriptorCount: 1,
        descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        pImageInfo: &info,
    };
    vkUpdateDescriptorSets(vk.device, 1, &desc_write, 0, null);
}
void
// Writes uniform-buffer descriptors for `buffers` into bindless set `type`:
// buffers[i] lands at array element indices[i] of binding 0. The info/write
// arrays are frame-arena allocated so their pointers stay valid for the
// duration of the vkUpdateDescriptorSets call.
WriteDescriptors(Vulkan* vk, DescType type, Buffer[] buffers, u32[] indices)
{
    assert(buffers.length > 0, "WriteDescriptors failure: Buffer length is 0");
    // BUG FIX: guard against an indices array shorter than buffers, which
    // previously read out of bounds inside the loop.
    assert(indices.length >= buffers.length, "WriteDescriptors failure: not enough indices for buffers");
    VkDescriptorBufferInfo[] buffer_info = AllocArray!(VkDescriptorBufferInfo)(&vk.frame_arenas[vk.frame_index], buffers.length);
    VkWriteDescriptorSet[] writes = AllocArray!(VkWriteDescriptorSet)(&vk.frame_arenas[vk.frame_index], buffers.length);
    foreach(i, buf; buffers)
    {
        buffer_info[i].buffer = buf.buffer;
        buffer_info[i].range = buf.size;
        writes[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writes[i].dstSet = vk.desc_sets[type];
        writes[i].dstArrayElement = indices[i];
        writes[i].dstBinding = 0;
        writes[i].descriptorCount = 1;
        writes[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        writes[i].pBufferInfo = buffer_info.ptr + i;
    }
    vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null);
}
void
// Writes sampled-image descriptors for `views` into bindless set `type`:
// views[i] lands at array element indices[i] of binding 0. Images are
// expected to be in SHADER_READ_ONLY_OPTIMAL when sampled.
WriteDescriptors(Vulkan* vk, DescType type, ImageView[] views, u32[] indices)
{
    assert(views.length > 0, "WriteDescriptors failure: ImageView length is 0");
    // BUG FIX: guard against an indices array shorter than views, which
    // previously read out of bounds inside the loop.
    assert(indices.length >= views.length, "WriteDescriptors failure: not enough indices for views");
    VkDescriptorImageInfo[] image_info = AllocArray!(VkDescriptorImageInfo)(&vk.frame_arenas[vk.frame_index], views.length);
    VkWriteDescriptorSet[] writes = AllocArray!(VkWriteDescriptorSet)(&vk.frame_arenas[vk.frame_index], views.length);
    foreach(i, view; views)
    {
        image_info[i].imageView = view.view;
        image_info[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        writes[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        writes[i].dstSet = vk.desc_sets[type];
        writes[i].dstArrayElement = indices[i];
        writes[i].dstBinding = 0;
        writes[i].descriptorCount = 1;
        // BUG FIX: this statement previously ended with a comma (deprecated
        // comma expression), fusing it with the pImageInfo assignment.
        writes[i].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        writes[i].pImageInfo = image_info.ptr + i;
    }
    vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null);
}
pragma(inline): void
// Dispatches one thread per swapchain pixel assuming a 16x16 local workgroup
// size in the compute shader.
Dispatch(Vulkan* vk, VkCommandBuffer cmd)
{
    // Integer ceiling division replaces the previous float Ceil() round-trip:
    // same group counts, no float conversion.
    u32 groups_x = (vk.swapchain_extent.width + 15) / 16;
    u32 groups_y = (vk.swapchain_extent.height + 15) / 16;
    vkCmdDispatch(cmd, groups_x, groups_y, 1);
}
pragma(inline): void
Dispatch(Vulkan* vk)
{
    // Dispatch on the current frame's command buffer.
    VkCommandBuffer cmd = vk.cmds[vk.frame_index];
    Dispatch(vk, cmd);
}
bool
VkCheck(string message, VkResult result)
{
    // Maps a VkResult to a pass/fail bool: out-of-memory results abort (no
    // recovery path yet), any other non-success result is logged with
    // `message` as the prefix and reported as false.
    // TODO: Handle error cases that can be handled
    if (result == VK_ERROR_OUT_OF_DEVICE_MEMORY)
    {
        assert(false, "Handle VK_ERROR_OUT_OF_DEVICE_MEMORY");
    }
    if (result == VK_ERROR_OUT_OF_HOST_MEMORY)
    {
        assert(false, "Handle VK_ERROR_OUT_OF_HOST_MEMORY");
    }
    if (result == VK_SUCCESS)
    {
        return true;
    }
    char[512] buf = '\0';
    buf.sformat("%s: %s", message, VkResultStr(result));
    Logf("%r", buf);
    return false;
}
void
VkCheckA(string message, VkResult result)
{
    // Abort-on-failure variant of VkCheck.
    bool ok = VkCheck(message, result);
    assert(ok, "Aborting program due to failure");
}
bool
// Creates all per-frame synchronization and command objects:
//  - one submit semaphore per swapchain image,
//  - per overlapped frame: command pool + buffer, render fence, acquire semaphore,
//  - the immediate-submit pool/buffer/fence (transfer queue family),
//  - the compute pool/buffer/fence (graphics queue family).
// Registers SI.FrameStructures for teardown; stops at the first failure.
InitFrameStructures(Vulkan* vk)
{
    Push(vk, SI.FrameStructures);
    bool success = true;
    Arena* arena = &vk.frame_arenas[0];
    VkSemaphoreCreateInfo sem_info = { sType: VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };
    VkCommandPoolCreateInfo pool_info = {
        sType: VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        flags: VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
    };
    // Fences start signaled so the first frame's wait doesn't block forever.
    VkFenceCreateInfo fence_info = {
        sType: VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        flags: VK_FENCE_CREATE_SIGNALED_BIT,
    };
    VkCommandBufferAllocateInfo cmd_info = {
        sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        commandBufferCount: 1,
        level: VK_COMMAND_BUFFER_LEVEL_PRIMARY,
    };
    // One submit semaphore per swapchain image.
    u32 sem_count = cast(u32)vk.present_images.length;
    vk.submit_sems = AllocArray!(VkSemaphore)(arena, sem_count);
    foreach(i; 0 .. sem_count)
    {
        if (success)
        {
            VkResult result = vkCreateSemaphore(vk.device, &sem_info, null, vk.submit_sems.ptr + i);
            success = VkCheck("vkCreateSemaphore failure", result);
        }
    }
    // Per-overlapped-frame objects on the graphics queue family.
    foreach(i; 0 .. FRAME_OVERLAP)
    {
        VkResult result;
        if (success)
        {
            pool_info.queueFamilyIndex = vk.gfx_index;
            result = vkCreateCommandPool(vk.device, &pool_info, null, vk.cmd_pools.ptr + i);
            success = VkCheck("vkCreateCommandPool failure", result);
        }
        if (success)
        {
            cmd_info.commandPool = vk.cmd_pools[i];
            result = vkAllocateCommandBuffers(vk.device, &cmd_info, vk.cmds.ptr + i);
            success = VkCheck("vkAllocateCommandBuffers failure", result);
        }
        if (success)
        {
            result = vkCreateFence(vk.device, &fence_info, null, vk.render_fences.ptr + i);
            success = VkCheck("vkCreateFence failure", result);
        }
        if (success)
        {
            result = vkCreateSemaphore(vk.device, &sem_info, null, vk.acquire_sems.ptr + i);
            success = VkCheck("vkCreateSemaphore failure", result);
        }
    }
    // Immediate-submit objects live on the transfer queue family.
    if (success)
    {
        pool_info.queueFamilyIndex = vk.tfer_index;
        VkResult result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.imm_pool);
        success = VkCheck("vkCreateCommandPool failure", result);
    }
    if (success)
    {
        cmd_info.commandPool = vk.imm_pool;
        VkResult result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.imm_cmd);
        success = VkCheck("vkAllocateCommandBuffers failure", result);
    }
    if (success)
    {
        VkResult result = vkCreateFence(vk.device, &fence_info, null, &vk.imm_fence);
        success = VkCheck("vkCreateFence failure", result);
    }
    // Compute objects use the graphics queue family.
    if (success)
    {
        pool_info.queueFamilyIndex = vk.gfx_index;
        VkResult result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.comp_cmd_pool);
        success = VkCheck("vkCreateCommandPool failure", result);
    }
    if (success)
    {
        cmd_info.commandPool = vk.comp_cmd_pool;
        VkResult result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.comp_cmd);
        // BUG FIX: the message previously blamed "vkCreateCommandPool".
        success = VkCheck("vkAllocateCommandBuffers failure", result);
    }
    if (success)
    {
        VkResult result = vkCreateFence(vk.device, &fence_info, null, &vk.comp_fence);
        success = VkCheck("vkCreateFence failure", result);
    }
    return success;
}
VkFormat
GetDrawImageFormat(Vulkan* vk)
{
    // Walks the preference-ordered format list and returns the first one the
    // physical device supports for the draw-image usage flags. Falls back to
    // VkFormat.init (== the previous default-initialized result) when no
    // candidate is usable.
    foreach(candidate; VK_IMAGE_FORMATS)
    {
        VkImageFormatProperties props;
        VkResult check = vkGetPhysicalDeviceImageFormatProperties(
            vk.physical_device,
            candidate,
            VK_IMAGE_TYPE_2D,
            VK_IMAGE_TILING_OPTIMAL,
            VK_DRAW_IMAGE_USAGE_FLAGS,
            0,
            &props
        );
        if (check == VK_SUCCESS)
        {
            return candidate;
        }
        // VK_ERROR_FORMAT_NOT_SUPPORTED (or any other error): try the next one.
    }
    return VkFormat.init;
}
bool
// Creates the offscreen draw image (first supported format from
// VK_IMAGE_FORMATS) and the D32 depth image, both GPU-only, plus their
// views. Records format/layout on the Vulkan struct for later pipeline
// creation and layout transitions. Registers SI.DrawImages for teardown.
CreateDrawImages(Vulkan* vk)
{
    Push(vk, SI.DrawImages);
    bool success = true;
    VkFormat draw_format = GetDrawImageFormat(vk);
    VkFormat depth_format = VK_FORMAT_D32_SFLOAT;
    // Shared allocation settings: device-local, GPU-only memory.
    VmaAllocationCreateInfo alloc_info = {
        usage: VMA_MEMORY_USAGE_GPU_ONLY,
        requiredFlags: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
    };
    VkImageCreateInfo image_info = {
        sType: VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        imageType: VK_IMAGE_TYPE_2D,
        mipLevels: 1,
        arrayLayers: 1,
        samples: VK_SAMPLE_COUNT_1_BIT,
        tiling: VK_IMAGE_TILING_OPTIMAL,
        usage: VK_DRAW_IMAGE_USAGE_FLAGS,
        extent: vk.swapchain_extent,
        format: draw_format,
    };
    VkResult result = vmaCreateImage(vk.vma, &image_info, &alloc_info, &vk.draw_image.image, &vk.draw_image.alloc, null);
    success = VkCheck("vmaCreateImage failure", result);
    if (success)
    {
        VkImageViewCreateInfo view_info = {
            sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            image: vk.draw_image.image,
            format: draw_format,
            viewType: VK_IMAGE_VIEW_TYPE_2D,
            subresourceRange: {
                aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
                baseMipLevel: 0,
                levelCount: 1,
                baseArrayLayer: 0,
                layerCount: 1,
            },
        };
        result = vkCreateImageView(vk.device, &view_info, null, &vk.draw_image.view);
        success = VkCheck("vkCreateImageView failure", result);
    }
    if (success)
    {
        VkImageCreateInfo depth_image_info = {
            sType: VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
            imageType: VK_IMAGE_TYPE_2D,
            mipLevels: 1,
            arrayLayers: 1,
            samples: VK_SAMPLE_COUNT_1_BIT,
            tiling: VK_IMAGE_TILING_OPTIMAL,
            format: depth_format,
            usage: VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
            extent: vk.swapchain_extent,
        };
        result = vmaCreateImage(vk.vma, &depth_image_info, &alloc_info, &vk.depth_image.image, &vk.depth_image.alloc, null);
        success = VkCheck("vmaCreateImage failure", result);
    }
    if (success)
    {
        VkImageViewCreateInfo depth_view_info = {
            sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            image: vk.depth_image.image,
            viewType: VK_IMAGE_VIEW_TYPE_2D,
            format: depth_format,
            subresourceRange: {
                aspectMask: VK_IMAGE_ASPECT_DEPTH_BIT,
                baseMipLevel: 0,
                levelCount: 1,
                baseArrayLayer: 0,
                layerCount: 1,
            },
        };
        result = vkCreateImageView(vk.device, &depth_view_info, null, &vk.depth_image.view);
        // BUG FIX: the message previously blamed a nonexistent "vmaCreateImageView".
        success = VkCheck("vkCreateImageView failure", result);
    }
    // Both images start in UNDEFINED; first use transitions them as needed.
    vk.draw_image.format = draw_format;
    vk.draw_image.layout = VK_IMAGE_LAYOUT_UNDEFINED;
    vk.depth_image.format = depth_format;
    vk.depth_image.layout = VK_IMAGE_LAYOUT_UNDEFINED;
    return success;
}
void
SelectSwapchainFormats(Vulkan* vk)
{
    // Caches the surface format (first one the surface reports) and the
    // present mode (mailbox when available, otherwise the always-supported
    // FIFO). Called once, on the first swapchain creation.
    Arena* scratch = &vk.frame_arenas[0];
    u32 n_formats;
    vkGetPhysicalDeviceSurfaceFormatsKHR(vk.physical_device, vk.surface, &n_formats, null);
    VkSurfaceFormatKHR[] surface_formats = AllocArray!(VkSurfaceFormatKHR)(scratch, n_formats);
    vkGetPhysicalDeviceSurfaceFormatsKHR(vk.physical_device, vk.surface, &n_formats, surface_formats.ptr);
    u32 n_modes;
    vkGetPhysicalDeviceSurfacePresentModesKHR(vk.physical_device, vk.surface, &n_modes, null);
    VkPresentModeKHR[] present_modes = AllocArray!(VkPresentModeKHR)(scratch, n_modes);
    vkGetPhysicalDeviceSurfacePresentModesKHR(vk.physical_device, vk.surface, &n_modes, present_modes.ptr);
    vk.present_mode = VK_PRESENT_MODE_FIFO_KHR;
    foreach(m; present_modes)
    {
        if (m == VK_PRESENT_MODE_MAILBOX_KHR)
        {
            vk.present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
            break;
        }
    }
    vk.surface_format = surface_formats[0];
}
bool
// Builds the swapchain from the current surface capabilities plus the cached
// surface format / present mode (selected once, on the first call), then
// wraps every swapchain image in an ImageView. Registers SI.Swapchain.
CreateSwapchain(Vulkan* vk)
{
    Push(vk, SI.Swapchain);
    bool success = true;
    Arena* arena = &vk.frame_arenas[0];
    VkSurfaceCapabilitiesKHR cap;
    vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vk.physical_device, vk.surface, &cap);
    static bool initialized = false;
    if (!initialized)
    {
        SelectSwapchainFormats(vk);
    }
    // BUG FIX: request one image above the minimum, but never exceed the
    // surface's maximum (maxImageCount == 0 means "no limit").
    u32 image_count = cap.minImageCount + 1;
    if (cap.maxImageCount > 0 && image_count > cap.maxImageCount)
    {
        image_count = cap.maxImageCount;
    }
    VkSwapchainCreateInfoKHR info = {
        sType: VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
        imageArrayLayers: 1,
        imageUsage: VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
        compositeAlpha: VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
        clipped: VK_TRUE,
        imageSharingMode: VK_SHARING_MODE_EXCLUSIVE,
        minImageCount: image_count,
        surface: vk.surface,
        imageFormat: vk.surface_format.format,
        imageColorSpace: vk.surface_format.colorSpace,
        imageExtent: {
            width: clamp(cast(u32)vk.window.w, cap.minImageExtent.width, cap.maxImageExtent.width),
            height: clamp(cast(u32)vk.window.h, cap.minImageExtent.height, cap.maxImageExtent.height),
        },
        preTransform: cap.currentTransform,
        presentMode: vk.present_mode,
    };
    VkResult result = vkCreateSwapchainKHR(vk.device, &info, null, &vk.swapchain);
    success = VkCheck("vkCreateSwapchainKHR failure", result);
    // BUG FIX: only query and wrap swapchain images when creation succeeded;
    // this previously ran against a null swapchain on failure.
    if (success)
    {
        u32 count;
        vkGetSwapchainImagesKHR(vk.device, vk.swapchain, &count, null);
        VkImage[] images = AllocArray!(VkImage)(arena, count);
        vkGetSwapchainImagesKHR(vk.device, vk.swapchain, &count, images.ptr);
        vk.present_images = AllocArray!(ImageView)(&vk.arena, count);
        VkImageViewCreateInfo view_info = {
            sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            viewType: VK_IMAGE_VIEW_TYPE_2D,
            format: vk.surface_format.format,
            components: {
                r: VK_COMPONENT_SWIZZLE_IDENTITY,
                g: VK_COMPONENT_SWIZZLE_IDENTITY,
                b: VK_COMPONENT_SWIZZLE_IDENTITY,
                a: VK_COMPONENT_SWIZZLE_IDENTITY,
            },
            subresourceRange: {
                aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
                baseMipLevel: 0,
                levelCount: 1,
                baseArrayLayer: 0,
                layerCount: 1,
            },
        };
        foreach(i, image; vk.present_images)
        {
            vk.present_images[i].image = images[i];
            view_info.image = images[i];
            result = vkCreateImageView(vk.device, &view_info, null, &vk.present_images[i].view);
            // BUG FIX: accumulate failures — a later success no longer masks
            // an earlier view-creation error.
            success = VkCheck("vkCreateImageView failure", result) && success;
        }
    }
    vk.swapchain_extent.width = info.imageExtent.width;
    vk.swapchain_extent.height = info.imageExtent.height;
    vk.swapchain_extent.depth = 1;
    if (!initialized && success)
    {
        initialized = true;
    }
    return success;
}
void
// Rebuilds the swapchain and draw/depth images after a resize/out-of-date
// event. Order matters: the device must be idle before destruction, and the
// shared descriptor set must be re-pointed at the new draw image afterwards.
RecreateSwapchain(Vulkan* vk)
{
    vkDeviceWaitIdle(vk.device);
    Destroy(vk.swapchain, vk.present_images, vk.device);
    Destroy(&vk.draw_image, &vk.depth_image, vk.device, vk.vma);
    CreateSwapchain(vk);
    CreateDrawImages(vk);
    WriteDrawImageDesc(vk);
}
bool
InitVMA(Vulkan* vk)
{
    // Creates the VMA allocator against Vulkan 1.3 with buffer-device-address
    // support; VMA resolves the remaining entry points itself from the two
    // proc-address loaders. Registers SI.Vma for teardown.
    Push(vk, SI.Vma);
    VmaVulkanFunctions fn_table = {
        vkGetInstanceProcAddr: vkGetInstanceProcAddr,
        vkGetDeviceProcAddr: vkGetDeviceProcAddr,
    };
    VmaAllocatorCreateInfo allocator_info = {
        flags: VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT,
        vulkanApiVersion: VK_MAKE_API_VERSION(0, 1, 3, 0),
        pVulkanFunctions: &fn_table,
        physicalDevice: vk.physical_device,
        device: vk.device,
        instance: vk.instance,
    };
    VkResult result = vmaCreateAllocator(&allocator_info, &vk.vma);
    bool success = VkCheck("vmaCreateAllocator failure", result);
    return success;
}
// Selects a physical device and creates the logical device plus its
// graphics and transfer queues. Preference: discrete GPU, and among those
// one that provides a distinct transfer queue family. Returns false when
// no device satisfies the extension/feature/queue requirements.
//
// Fixes vs. previous revision:
//  - When CheckQueueProperties falls back to tfer_index == gfx_index
//    (single_queue == false), the old code emitted two
//    VkDeviceQueueCreateInfo entries with the SAME queueFamilyIndex
//    (invalid per the spec: indices must be unique) and then requested
//    queue index 1 from a family where only one queue was created. Such a
//    duplicate family is now treated as the shared-queue path.
//  - timelineSemaphore is enabled; CheckDeviceFeatures requires it, but it
//    was never requested at device creation.
//  - Log message said "vkCreateDevices".
bool
InitDevice(Vulkan* vk)
{
    Push(vk, SI.Device);
    bool success = false;
    Arena* arena = &vk.frame_arenas[0];
    u32 count;
    vkEnumeratePhysicalDevices(vk.instance, &count, null);
    VkPhysicalDevice[] devices = AllocArray!(VkPhysicalDevice)(arena, count);
    vkEnumeratePhysicalDevices(vk.instance, &count, devices.ptr);
    VkPhysicalDevice physical_device = null;
    bool discrete_candidate = false;
    QueueInfo candidate = {
        gfx_index: -1,
        tfer_index: -1,
        single_queue: false,
    };
    foreach(dev; devices)
    {
        QueueInfo current = CheckQueueProperties(arena, dev, vk.surface);
        b32 discrete = false;
        if (current.gfx_index < 0)
            continue;
        if (!CheckDeviceProperties(arena, dev, vk.surface, &discrete))
            continue;
        // Never downgrade from an already-found discrete GPU.
        if (discrete_candidate && !discrete)
            continue;
        if (!CheckDeviceFeatures(dev))
            continue;
        discrete_candidate = cast(bool)discrete;
        candidate = current;
        physical_device = dev;
        // Keep scanning for a discrete GPU with a dedicated transfer family.
        if (discrete_candidate && !candidate.single_queue)
            continue;
    }
    if (physical_device)
    {
        VkDeviceQueueCreateInfo[2] queue_info;
        f32 priority = 1.0f;
        count = 1;
        queue_info[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queue_info[0].queueFamilyIndex = candidate.gfx_index;
        queue_info[0].queueCount = 1;
        queue_info[0].pQueuePriorities = &priority;
        queue_info[0].flags = 0;
        // A second create-info entry is only legal when the transfer family
        // differs from the graphics family (queueFamilyIndex values must be
        // unique across pQueueCreateInfos).
        bool shared_queue = candidate.single_queue ||
                            candidate.tfer_index == candidate.gfx_index;
        if (!shared_queue)
        {
            queue_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
            queue_info[1].queueFamilyIndex = candidate.tfer_index;
            queue_info[1].queueCount = 1;
            queue_info[1].pQueuePriorities = &priority;
            queue_info[1].flags = 0;
            count += 1;
        }
        // Enabled feature set mirrors the requirements asserted by
        // CheckDeviceFeatures, so enabling them here is always valid.
        VkPhysicalDeviceVulkan13Features features_13 = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
            synchronization2: VK_TRUE,
            dynamicRendering: VK_TRUE,
        };
        VkPhysicalDeviceVulkan12Features features_12 = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
            pNext: &features_13,
            descriptorIndexing: VK_TRUE,
            bufferDeviceAddress: VK_TRUE,
            descriptorBindingUniformBufferUpdateAfterBind: VK_TRUE,
            descriptorBindingSampledImageUpdateAfterBind: VK_TRUE,
            descriptorBindingStorageImageUpdateAfterBind: VK_TRUE,
            descriptorBindingStorageBufferUpdateAfterBind: VK_TRUE,
            descriptorBindingPartiallyBound: VK_TRUE,
            shaderSampledImageArrayNonUniformIndexing: VK_TRUE,
            shaderUniformBufferArrayNonUniformIndexing: VK_TRUE,
            runtimeDescriptorArray: VK_TRUE,
            storageBuffer8BitAccess: VK_TRUE,
            timelineSemaphore: VK_TRUE,
        };
        VkPhysicalDeviceVulkan11Features features_11 = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
            pNext: &features_12,
        };
        VkPhysicalDeviceFeatures2 features = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
            pNext: &features_11,
            features: {
                shaderUniformBufferArrayDynamicIndexing: VK_TRUE,
                shaderSampledImageArrayDynamicIndexing: VK_TRUE,
                shaderStorageBufferArrayDynamicIndexing: VK_TRUE,
                shaderStorageImageArrayDynamicIndexing: VK_TRUE,
                samplerAnisotropy: VK_TRUE,
            },
        };
        VkDeviceCreateInfo device_info = {
            sType: VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
            pNext: &features,
            ppEnabledExtensionNames: VK_DEVICE_EXTENSIONS.ptr,
            enabledExtensionCount: cast(u32)VK_DEVICE_EXTENSIONS.length,
            queueCreateInfoCount: count,
            pQueueCreateInfos: queue_info.ptr,
            pEnabledFeatures: null, // features go through the pNext chain
        };
        VkResult result = vkCreateDevice(physical_device, &device_info, null, &vk.device);
        if (result != VK_SUCCESS)
        {
            Logf("vkCreateDevice failure: %s", VkResultStr(result));
        }
        else
        {
            LoadDeviceFunctions(vk);
            vkGetDeviceQueue(
                vk.device,
                candidate.gfx_index,
                0,
                &candidate.gfx_queue
            );
            if (!shared_queue)
            {
                vkGetDeviceQueue(
                    vk.device,
                    candidate.tfer_index,
                    0,
                    &candidate.tfer_queue
                );
            }
            else
            {
                // One family / one queue: transfer submits go through the
                // graphics queue.
                candidate.tfer_queue = candidate.gfx_queue;
                candidate.tfer_index = candidate.gfx_index;
            }
            vk.physical_device = physical_device;
            vk.queues = candidate;
            success = true;
        }
    }
    return success;
}
// Returns true when `device` supports every core, Vulkan 1.2, and Vulkan
// 1.3 feature this renderer enables at device creation (see InitDevice).
//
// Fix: the previous revision issued two separate
// vkGetPhysicalDeviceFeatures2 calls, re-pointing features2.pNext each
// time; the structs are now chained (features2 -> 1.2 -> 1.3) so a single
// query fills all three, which is the API's intended usage.
bool
CheckDeviceFeatures(VkPhysicalDevice device)
{
    VkPhysicalDeviceVulkan13Features features_13 = { sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES };
    VkPhysicalDeviceVulkan12Features features_12 = { sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES };
    VkPhysicalDeviceFeatures2 features2 = { sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
    features_12.pNext = &features_13;
    features2.pNext = &features_12;
    vkGetPhysicalDeviceFeatures2(device, &features2);
    VkPhysicalDeviceFeatures features = features2.features;
    bool result = true;
    // Core (Vulkan 1.0) features.
    result &= cast(bool)features.shaderUniformBufferArrayDynamicIndexing;
    result &= cast(bool)features.shaderSampledImageArrayDynamicIndexing;
    result &= cast(bool)features.shaderStorageBufferArrayDynamicIndexing;
    result &= cast(bool)features.shaderStorageImageArrayDynamicIndexing;
    result &= cast(bool)features.samplerAnisotropy;
    // Vulkan 1.2 features (bindless descriptors, BDA, timeline semaphores).
    result &= cast(bool)features_12.descriptorIndexing;
    result &= cast(bool)features_12.bufferDeviceAddress;
    result &= cast(bool)features_12.descriptorBindingUniformBufferUpdateAfterBind;
    result &= cast(bool)features_12.descriptorBindingSampledImageUpdateAfterBind;
    result &= cast(bool)features_12.descriptorBindingStorageImageUpdateAfterBind;
    result &= cast(bool)features_12.descriptorBindingStorageBufferUpdateAfterBind;
    result &= cast(bool)features_12.descriptorBindingPartiallyBound;
    result &= cast(bool)features_12.runtimeDescriptorArray;
    result &= cast(bool)features_12.shaderSampledImageArrayNonUniformIndexing;
    result &= cast(bool)features_12.shaderUniformBufferArrayNonUniformIndexing;
    result &= cast(bool)features_12.timelineSemaphore;
    result &= cast(bool)features_12.storageBuffer8BitAccess;
    // Vulkan 1.3 features.
    result &= cast(bool)features_13.synchronization2;
    result &= cast(bool)features_13.dynamicRendering;
    return result;
}
// Returns true when `device` advertises API version >= 1.3, supports every
// extension in VK_DEVICE_EXTENSIONS, and exposes at least one surface
// format and present mode for `surface`. On success `*discrete` reports
// whether the device is a discrete GPU.
//
// Fix: the version gate previously checked only the minor version, which
// would both misread a hypothetical major version bump and accept e.g. a
// future x.3 for the wrong reason; major and minor are now tested together.
bool
CheckDeviceProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface, b32* discrete)
{
    bool success = false;
    VkPhysicalDeviceProperties props;
    vkGetPhysicalDeviceProperties(device, &props);
    u32 major = VK_API_VERSION_MAJOR(props.apiVersion);
    u32 minor = VK_API_VERSION_MINOR(props.apiVersion);
    if (major > 1 || (major == 1 && minor >= 3))
    {
        u32 ext_count;
        vkEnumerateDeviceExtensionProperties(device, null, &ext_count, null);
        VkExtensionProperties[] ext_props = AllocArray!(VkExtensionProperties)(arena, ext_count);
        vkEnumerateDeviceExtensionProperties(device, null, &ext_count, ext_props.ptr);
        // Count how many required extensions appear in the device's list.
        i32 matched = 0;
        foreach(prop; ext_props)
        {
            foreach(ext; VK_DEVICE_EXTENSIONS)
            {
                if (strcmp(cast(char*)prop.extensionName, ext) == 0)
                {
                    matched += 1;
                    break;
                }
            }
        }
        if (matched == VK_DEVICE_EXTENSIONS.length)
        {
            // Only the counts matter here; the actual format/mode selection
            // happens elsewhere.
            u32 fmt_count, present_count;
            vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &fmt_count, null);
            vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, null);
            *discrete = props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
            success = fmt_count && present_count;
        }
    }
    return success;
}
// Selects graphics and transfer queue family indices for `device`.
// gfx_index stays -1 when no family supports both graphics and presenting
// to `surface` (callers treat that as "device unusable").
// Transfer-family preference, as implemented by the guarded assignments
// below (later iterations can only upgrade, never downgrade):
//   1. transfer+sparse family without graphics/compute (dedicated DMA)
//   2. any transfer+sparse-capable family
//   3. compute family without the explicit transfer bit
//   4. any compute-capable family
// NOTE(review): BitEq appears to test that all of the given bits are set —
// confirm against its definition in util.
QueueInfo
CheckQueueProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface)
{
    const u32 T_BIT = VK_QUEUE_TRANSFER_BIT;
    const u32 C_BIT = VK_QUEUE_COMPUTE_BIT;
    const u32 G_BIT = VK_QUEUE_GRAPHICS_BIT;
    const u32 S_BIT = VK_QUEUE_SPARSE_BINDING_BIT;
    QueueInfo current = {
        gfx_index: -1,
        tfer_index: -1,
        single_queue: false,
    };
    u32 count;
    vkGetPhysicalDeviceQueueFamilyProperties(device, &count, null);
    VkQueueFamilyProperties[] properties = AllocArray!(VkQueueFamilyProperties)(arena, count);
    vkGetPhysicalDeviceQueueFamilyProperties(device, &count, properties.ptr);
    // Exactly one do-everything family with a single queue: graphics and
    // transfer must share it.
    if (count == 1 && properties[0].queueCount == 1 && BitEq(properties[0].queueFlags, T_BIT | C_BIT | G_BIT))
    {
        current.gfx_index = current.tfer_index = 0;
        current.single_queue = true;
    }
    else
    {
        // sparse/tfer_only record the quality of the transfer family found
        // so far; each branch below only fires if it improves on them.
        bool sparse = false, tfer_only = false;
        foreach(i, prop; properties)
        {
            b32 surface_support;
            vkGetPhysicalDeviceSurfaceSupportKHR(device, cast(u32)i, surface, &surface_support);
            // First graphics-capable family that can also present wins.
            if (current.gfx_index < 0 && surface_support && BitEq(prop.queueFlags, G_BIT))
            {
                current.gfx_index = cast(i32)i;
                continue;
            }
            // Tier 1: transfer+sparse without graphics or compute.
            if (BitEq(prop.queueFlags, T_BIT | S_BIT) && !BitEq(prop.queueFlags, G_BIT | C_BIT))
            {
                sparse = true;
                tfer_only = true;
                current.tfer_index = cast(i32)i;
                continue;
            }
            // Tier 2: transfer+sparse capable (may also do other work).
            if (!(sparse && tfer_only) && BitEq(prop.queueFlags, T_BIT | S_BIT))
            {
                sparse = true;
                current.tfer_index = cast(i32)i;
                continue;
            }
            // Tier 3: compute family without the explicit transfer bit
            // (compute queues can still transfer per the Vulkan spec).
            if (!sparse && !BitEq(prop.queueFlags, T_BIT) && BitEq(prop.queueFlags, C_BIT))
            {
                tfer_only = true;
                current.tfer_index = cast(i32)i;
                continue;
            }
            // Tier 4: any compute-capable family.
            if (!sparse && !tfer_only && BitEq(prop.queueFlags, C_BIT))
            {
                current.tfer_index = cast(i32)i;
            }
        }
        // No separate transfer family found: fall back to the graphics
        // family (note: single_queue remains false in this case).
        if (current.tfer_index < 0)
        {
            current.tfer_index = current.gfx_index;
        }
    }
    return current;
}
// Records that initialization step `step` was reached, so teardown can
// undo the steps most-recent-first (the list is LIFO via PushFront).
pragma(inline): void
Push(Vulkan* vk, StepInitialized step)
{
    Node!(SI)* entry = Alloc!(Node!(SI));
    entry.value = step;
    PushFront(&vk.cleanup_list, entry, null);
}
// Releases the renderer's CPU-side memory: each per-frame arena, then the
// persistent arena.
void
DestroyRenderer(Vulkan* vk)
{
    foreach(ref frame_arena; vk.frame_arenas)
    {
        Free(&frame_arena);
    }
    Free(&vk.arena);
}
// Destroys the Vulkan instance; no-op for a null handle so partial
// initialization tears down cleanly.
void
Destroy(VkInstance instance)
{
    if (!instance)
        return;
    vkDestroyInstance(instance, null);
}
// Destroys the debug-utils messenger. Compiled to a no-op outside debug
// builds, and skipped when the messenger was never created.
void
Destroy(VkDebugUtilsMessengerEXT dbg, VkInstance instance)
{
    debug
    {
        if (!dbg)
            return;
        vkDestroyDebugUtilsMessengerEXT(instance, dbg, null);
    }
}
// Destroys the window surface; no-op for a null handle.
void
Destroy(VkSurfaceKHR surface, VkInstance instance)
{
    if (!surface)
        return;
    vkDestroySurfaceKHR(instance, surface, null);
}
// Destroys the logical device; no-op for a null handle.
void
Destroy(VkDevice device)
{
    if (!device)
        return;
    vkDestroyDevice(device, null);
}
// Destroys the VMA allocator; no-op for a null handle.
void
Destroy(VmaAllocator vma)
{
    if (!vma)
        return;
    vmaDestroyAllocator(vma);
}
// Destroys each swapchain image view, then the swapchain itself. The
// VkImages are owned by the swapchain (they were obtained via
// vkGetSwapchainImagesKHR) and are not destroyed individually.
void
Destroy(VkSwapchainKHR swapchain, ImageView[] views, VkDevice device)
{
    foreach(v; views)
    {
        if (!v.view)
            continue;
        vkDestroyImageView(device, v.view, null);
    }
    if (!swapchain)
        return;
    vkDestroySwapchainKHR(device, swapchain, null);
}
// Destroys a VMA-backed image and its view. Each handle is checked
// independently, so a half-constructed ImageView still cleans up.
void
Destroy(ImageView* view, VkDevice device, VmaAllocator vma)
{
    VkImageView v = view.view;
    VkImage img = view.image;
    if (v)
        vkDestroyImageView(device, v, null);
    if (img)
        vmaDestroyImage(vma, img, view.alloc);
}
// Destroys descriptor-related objects: sampler, pipeline layout, each set
// layout, and finally the pool (destroying the pool also frees any sets
// allocated from it). Every handle is null-checked.
void
Destroy(VkDescriptorPool pool, VkDescriptorSetLayout[] layouts, VkPipelineLayout pipeline_layout, VkSampler sampler, VkDevice device)
{
    if (sampler)
        vkDestroySampler(device, sampler, null);
    if (pipeline_layout)
        vkDestroyPipelineLayout(device, pipeline_layout, null);
    foreach(set_layout; layouts)
    {
        if (set_layout)
            vkDestroyDescriptorSetLayout(device, set_layout, null);
    }
    if (pool)
        vkDestroyDescriptorPool(device, pool, null);
}
// Convenience overload: destroys both offscreen render targets (color draw
// image and depth image) via the single-ImageView overload.
void
Destroy(ImageView* draw, ImageView* depth, VkDevice device, VmaAllocator vma)
{
    Destroy(draw, device, vma);
    Destroy(depth, device, vma);
}
// Destroys all frame-synchronization objects: immediate-submit fence/
// buffer/pool, compute buffer/pool/fence, per-swapchain-image submit
// semaphores, and per-frame fences, command pools/buffers, and acquire
// semaphores. Command buffers are always freed before their owning pool
// is destroyed, and every handle is null-checked so a partially
// initialized state tears down cleanly.
//
// Fix: the per-frame loop tested `vk.cmd_pools[i]` twice in a row (once
// to free the buffer, once to destroy the pool); the pair is now under a
// single guard.
void
DestroyFS(Vulkan* vk)
{
    if (vk.imm_fence)
    {
        vkDestroyFence(vk.device, vk.imm_fence, null);
    }
    if (vk.imm_cmd)
    {
        vkFreeCommandBuffers(vk.device, vk.imm_pool, 1, &vk.imm_cmd);
    }
    if (vk.imm_pool)
    {
        vkDestroyCommandPool(vk.device, vk.imm_pool, null);
    }
    if (vk.comp_cmd)
    {
        vkFreeCommandBuffers(vk.device, vk.comp_cmd_pool, 1, &vk.comp_cmd);
    }
    if (vk.comp_cmd_pool)
    {
        vkDestroyCommandPool(vk.device, vk.comp_cmd_pool, null);
    }
    if (vk.comp_fence)
    {
        vkDestroyFence(vk.device, vk.comp_fence, null);
    }
    foreach(sem; vk.submit_sems)
    {
        if (sem)
        {
            vkDestroySemaphore(vk.device, sem, null);
        }
    }
    foreach(i; 0 .. FRAME_OVERLAP)
    {
        if (vk.render_fences[i])
        {
            vkDestroyFence(vk.device, vk.render_fences[i], null);
        }
        if (vk.cmd_pools[i])
        {
            // Free the buffer first; it is owned by this pool.
            vkFreeCommandBuffers(vk.device, vk.cmd_pools[i], 1, &vk.cmds[i]);
            vkDestroyCommandPool(vk.device, vk.cmd_pools[i], null);
        }
        if (vk.acquire_sems[i])
        {
            vkDestroySemaphore(vk.device, vk.acquire_sems[i], null);
        }
    }
}
// Creates the VkInstance. Detects validation-layer availability (recorded
// in g_VLAYER_SUPPORT), and in debug builds enables the Khronos
// validation layer, the debug-utils extension, and — when g_DEBUG_PRINTF
// is set — the shader debug-printf validation feature.
bool
InitInstance(Vulkan* vk)
{
    Push(vk, SI.Instance);
    Arena* arena = &vk.frame_arenas[0];
    // Probe the installed layers for Khronos validation support.
    u32 layer_count;
    vkEnumerateInstanceLayerProperties(&layer_count, null);
    VkLayerProperties[] available = AllocArray!(VkLayerProperties)(arena, layer_count);
    vkEnumerateInstanceLayerProperties(&layer_count, available.ptr);
    foreach(layer; available)
    {
        if (strcmp(cast(char*)layer.layerName.ptr, "VK_LAYER_KHRONOS_validation") == 0)
        {
            g_VLAYER_SUPPORT = true;
            break;
        }
    }
    bool use_validation = g_VLAYER_SUPPORT && BUILD_DEBUG;
    const char*[] instance_layers = use_validation ? VK_INSTANCE_LAYERS_DEBUG : VK_INSTANCE_LAYERS;
    const char*[] instance_ext = use_validation ? VK_INSTANCE_EXT_DEBUG : VK_INSTANCE_EXT;
    VkApplicationInfo app_info = {
        sType: VK_STRUCTURE_TYPE_APPLICATION_INFO,
        pApplicationName: "Video Game",
        applicationVersion: VK_MAKE_API_VERSION(0, 0, 0, 1),
        pEngineName: "Gears",
        engineVersion: VK_MAKE_API_VERSION(0, 0, 0, 1),
        apiVersion: VK_MAKE_API_VERSION(0, 1, 3, 0),
    };
    VkInstanceCreateInfo instance_info = {
        sType: VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        pApplicationInfo: &app_info,
        enabledLayerCount: cast(u32)instance_layers.length,
        ppEnabledLayerNames: instance_layers.ptr,
        enabledExtensionCount: cast(u32)instance_ext.length,
        ppEnabledExtensionNames: instance_ext.ptr,
    };
    // Debug-printf must be requested at instance creation; the structs live
    // in this scope so the pNext pointer stays valid through the call.
    VkValidationFeatureEnableEXT printf_enable = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT;
    VkValidationFeaturesEXT validation_features = {
        sType: VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
        enabledValidationFeatureCount: 1,
        pEnabledValidationFeatures: &printf_enable,
    };
    debug
    {
        if (g_VLAYER_SUPPORT && g_DEBUG_PRINTF)
        {
            instance_info.pNext = &validation_features;
        }
    }
    VkResult result = vkCreateInstance(&instance_info, null, &vk.instance);
    return VkCheck("vkCreateInstance failure", result);
}
// Creates the platform window surface (XCB on Linux, Win32 on Windows).
// NOTE: `result` is declared inside a version block and read after it;
// this is valid because D version statements do not introduce a new scope.
bool
InitSurface(Vulkan* vk)
{
    Push(vk, SI.Surface);
    version(linux)
    {
        VkXcbSurfaceCreateInfoKHR surface_info = {
            sType: VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
            connection: vk.window.conn,
            window: vk.window.window,
        };
        VkResult result = vkCreateXcbSurfaceKHR(vk.instance, &surface_info, null, &vk.surface);
    }
    version(Windows)
    {
        VkWin32SurfaceCreateInfoKHR surface_info = {
            sType: VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
            hinstance: vk.window.instance,
            hwnd: vk.window.handle,
        };
        VkResult result = vkCreateWin32SurfaceKHR(vk.instance, &surface_info, null, &vk.surface);
    }
    bool success = VkCheck("InitSurface failure", result);
    return success;
}
// Installs the debug-utils messenger that routes validation output to
// DebugCallback. Debug builds only; degrades to a logged warning when the
// validation layer is unsupported or messenger creation fails — rendering
// continues either way.
void
EnableVLayers(Vulkan* vk)
{
    debug
    {
        Push(vk, SI.Debug);
        if (!g_VLAYER_SUPPORT)
        {
            Logf("EnableVLayers warning: Not supported on current device, continuing without");
            return;
        }
        VkDebugUtilsMessengerCreateInfoEXT info = {
            sType: VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            messageSeverity: VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                             VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
            messageType: VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
            pfnUserCallback: cast(PFN_vkDebugUtilsMessengerCallbackEXT)&DebugCallback,
        };
        // Debug-printf output arrives at INFO severity, so widen the filter.
        if (g_DEBUG_PRINTF)
        {
            info.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
        }
        VkResult result = vkCreateDebugUtilsMessengerEXT(vk.instance, &info, null, &vk.dbg_msg);
        if (result != VK_SUCCESS)
        {
            Logf("EnableVLayers failed to initialize, will continue without validation: %s", VkResultStr(result));
        }
    }
}
// Logs the GPU ISA disassembly for one shader stage of `pipeline` using
// VK_AMD_shader_info. Compiled in only for AMD_GPU debug builds (the
// extension is requested via VK_AMD_DEVICE_EXTENSIONS in that
// configuration); silently does nothing if the query fails.
void
PrintShaderDisassembly(Vulkan* vk, Pipeline* pipeline, VkShaderStageFlagBits stage)
{
    version(AMD_GPU)
    {
        debug
        {
            // Standard two-call pattern: first query the size, then fetch
            // the disassembly text into a frame-arena buffer (presumably
            // reclaimed when the frame arena resets — confirm arena policy).
            u64 size;
            VkResult result = vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, null);
            if (result == VK_SUCCESS)
            {
                u8[] buf = AllocArray!(u8)(&vk.frame_arenas[vk.frame_index], size);
                vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, buf.ptr);
                Logf("DISASSEMBLY:\n%r", buf);
            }
        }
    }
}