diff --git a/vulkan.d b/vulkan.d index 05f7a92..c992793 100644 --- a/vulkan.d +++ b/vulkan.d @@ -460,7 +460,7 @@ struct Vulkan PipelineHandles[] pipeline_handles; u32 pipeline_count; - VkPipeline[FRAME_OVERLAP] last_pipeline; + VkPipeline[FRAME_OVERLAP] last_pipelines; DescSetLayout global_set_layout; DescSet global_set; @@ -481,6 +481,72 @@ struct Vulkan f32[4] color_clear; f32[4] depth_clear; + @property VkCommandPool + cmd_pool() + { + return this.cmd_pools[this.frame_index]; + } + + @property VkCommandBuffer + cmd() + { + return this.cmds[this.frame_index]; + } + + @property VkSemaphore + acquire_sem() + { + return this.acquire_sems[this.frame_index]; + } + + @property VkSemaphore* + acquire_sem_ptr() + { + return this.acquire_sems.ptr + this.frame_index; + } + + @property VkFence + render_fence() + { + return this.render_fences[this.frame_index]; + } + + @property VkFence* + render_fence_ptr() + { + return this.render_fences.ptr + this.frame_index; + } + + @property VkPipeline + last_pipeline() + { + return this.last_pipelines[this.frame_index]; + } + + @property VkPipeline + last_pipeline(VkPipeline pipeline) + { + return this.last_pipelines[this.frame_index] = pipeline; + } + + @property VkSemaphore + submit_sem() + { + return this.submit_sems[this.image_index]; + } + + @property ref ImageView + present_image() + { + return this.present_images[this.image_index]; + } + + Arena* + FrameArena() + { + return &this.frame_arenas[this.frame_index]; + } + alias queues this; } @@ -505,45 +571,45 @@ struct QueueInfo bool single_queue; } +__gshared Vulkan g_vk; + u8[] CONVERT_SHADER_BYTES = cast(u8[])import("convert.comp.spv"); DescSetLayout -CreateDescSetLayout(Vulkan* vk, DescLayoutBinding[] bindings) +CreateDescSetLayout(DescLayoutBinding[] bindings) { VkDescriptorSetLayoutCreateInfo layout_info = { - sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, bindingCount: cast(u32)bindings.length, - 
pBindings: bindings.ptr, + pBindings: bindings.ptr, }; DescSetLayout layout; - VkResult result = vkCreateDescriptorSetLayout(vk.device, &layout_info, null, &layout); + VkResult result = vkCreateDescriptorSetLayout(g_vk.device, &layout_info, null, &layout); VkCheckA("vkCreateDescriptorSetLayout failure", result); return layout; } DescSet -AllocDescSet(Vulkan* vk, DescSetLayout layout, u32 dynamic_count = 0) +AllocDescSet(DescSetLayout layout, u32 dynamic_count = 0) { VkDescriptorSetAllocateInfo alloc_info = { - sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, descriptorSetCount: 1, - pSetLayouts: &layout, - descriptorPool: vk.active_pool, + pSetLayouts: &layout, + descriptorPool: g_vk.active_pool, }; - DescSet set = { - dynamic_count: dynamic_count, - }; - VkResult result = vkAllocateDescriptorSets(vk.device, &alloc_info, &set.handle); + DescSet set = { dynamic_count: dynamic_count }; + VkResult result = vkAllocateDescriptorSets(g_vk.device, &alloc_info, &set.handle); if(result == VK_ERROR_OUT_OF_POOL_MEMORY || result == VK_ERROR_FRAGMENTED_POOL) { - PushDescriptorPool(vk); + PushDescriptorPool(); - alloc_info.descriptorPool = vk.active_pool; + alloc_info.descriptorPool = g_vk.active_pool; - result = vkAllocateDescriptorSets(vk.device, &alloc_info, &set.handle); + result = vkAllocateDescriptorSets(g_vk.device, &alloc_info, &set.handle); VkCheckA("vkAllocateDescriptorSets failure", result); } @@ -551,7 +617,7 @@ AllocDescSet(Vulkan* vk, DescSetLayout layout, u32 dynamic_count = 0) } PipelineLayout -CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute = false) if(is(T: DescSetLayout) || is(T: DescSetLayout[])) +CreatePipelineLayout(T)(T layouts, u32 push_const_size, bool compute = false) if(is(T: DescSetLayout) || is(T: DescSetLayout[])) { VkShaderStageFlagBits stage = (compute ? 
VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT); @@ -559,14 +625,14 @@ CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute static if(is(T: DescSetLayout)) { - desc_layouts = Alloc!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], 2); - desc_layouts[0] = vk.global_set_layout; + desc_layouts = Alloc!(DescSetLayout)(g_vk.FrameArena(), 2); + desc_layouts[0] = g_vk.global_set_layout; desc_layouts[1] = layouts; } else static if(is(T: DescSetLayout[])) { - desc_layouts = Alloc!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], layouts.length + 1); - desc_layouts[0] = vk.global_set_layout; + desc_layouts = Alloc!(DescSetLayout)(g_vk.FrameArena(), layouts.length + 1); + desc_layouts[0] = g_vk.global_set_layout; foreach(i; 0 .. layouts.length) { @@ -575,43 +641,43 @@ CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute } VkPushConstantRange const_range = { - offset: 0, - size: cast(VkDeviceSize)push_const_size, + offset: 0, + size: cast(VkDeviceSize)push_const_size, stageFlags: stage, }; VkPipelineLayoutCreateInfo layout_info = { - sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, - setLayoutCount: cast(u32)desc_layouts.length, - pSetLayouts: desc_layouts.ptr, + sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + setLayoutCount: cast(u32)desc_layouts.length, + pSetLayouts: desc_layouts.ptr, pushConstantRangeCount: (push_const_size > 0 ? 1 : 0), - pPushConstantRanges: (push_const_size > 0 ? &const_range : null), + pPushConstantRanges: (push_const_size > 0 ? 
&const_range : null), }; PipelineLayout layout; - VkResult result = vkCreatePipelineLayout(vk.device, &layout_info, null, &layout); + VkResult result = vkCreatePipelineLayout(g_vk.device, &layout_info, null, &layout); VkCheckA("CreatePipelineLayout failure", result); return layout; } void -CreateBuffer(Vulkan* vk, Descriptor* desc, BufferType type, u64 size, bool host_visible, DescType desc_type, u32 binding) +CreateBuffer(Descriptor* desc, BufferType type, u64 size, bool host_visible, DescType desc_type, u32 binding) { desc.type = desc_type; desc.binding = binding; - CreateBuffer(vk, &desc.buf, type, size, host_visible); + CreateBuffer(&desc.buf, type, size, host_visible); } void -CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visible) +CreateBuffer(Buffer* buf, BufferType type, u64 size, bool host_visible) { assert(type != BT.None, "CreateBuffer failure: type is None"); VkBufferCreateInfo buffer_info = { sType: VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, usage: type, - size: size, + size: size, }; VmaAllocationCreateInfo alloc_info = { @@ -621,25 +687,25 @@ CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visib if(host_visible) { - alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; alloc_info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT; } else { - buffer_info.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT; + buffer_info.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT; alloc_info.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; } - u32[2] indices = [vk.queues.gfx_index, vk.queues.tfer_index]; - if(vk.queues.gfx_index != vk.queues.tfer_index) + u32[2] indices = [g_vk.queues.gfx_index, g_vk.queues.tfer_index]; + if(g_vk.queues.gfx_index != g_vk.queues.tfer_index) { - buffer_info.sharingMode = VK_SHARING_MODE_CONCURRENT; + buffer_info.sharingMode = 
VK_SHARING_MODE_CONCURRENT; buffer_info.queueFamilyIndexCount = 2; - buffer_info.pQueueFamilyIndices = indices.ptr; + buffer_info.pQueueFamilyIndices = indices.ptr; } VmaAllocationInfo vma_info; - VkResult result = vmaCreateBuffer(vk.vma, &buffer_info, &alloc_info, &buf.buffer, &buf.alloc, &vma_info); + VkResult result = vmaCreateBuffer(g_vk.vma, &buffer_info, &alloc_info, &buf.buffer, &buf.alloc, &vma_info); // TODO: handle errors here then reallocate buffer VkCheck("CreateBuffer failure: vmaCreateBuffer error", result); @@ -647,66 +713,61 @@ CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visib } void -BindBuffers(Vulkan* vk, Buffer* index_buffer, Buffer* vertex_buffer) +BindBuffers(Buffer* index_buffer, Buffer* vertex_buffer) { VkDeviceSize offset = 0; - vkCmdBindIndexBuffer(vk.cmds[vk.frame_index], index_buffer.buffer, 0, VK_INDEX_TYPE_UINT32); - vkCmdBindVertexBuffers(vk.cmds[vk.frame_index], 0, 1, &vertex_buffer.buffer, &offset); + vkCmdBindIndexBuffer(g_vk.cmd, index_buffer.buffer, 0, VK_INDEX_TYPE_UINT32); + vkCmdBindVertexBuffers(g_vk.cmd, 0, 1, &vertex_buffer.buffer, &offset); } void -BindBuffers(T)(Vulkan* vk, MappedBuffer!(u32)* index_buffer, MappedBuffer!(T)* vertex_buffer) +BindBuffers(T)(MappedBuffer!(u32)* index_buffer, MappedBuffer!(T)* vertex_buffer) { VkDeviceSize offset = 0; - vkCmdBindIndexBuffer(vk.cmds[vk.frame_index], index_buffer.buffer, 0, VK_INDEX_TYPE_UINT32); - vkCmdBindVertexBuffers(vk.cmds[vk.frame_index], 0, 1, &vertex_buffer.buffer, &offset); + vkCmdBindIndexBuffer(g_vk.cmd, index_buffer.buffer, 0, VK_INDEX_TYPE_UINT32); + vkCmdBindVertexBuffers(g_vk.cmd, 0, 1, &vertex_buffer.buffer, &offset); } MappedBuffer!(T) -CreateMappedBuffer(T)(Vulkan* vk, BufferType type, u64 count) +CreateMappedBuffer(T)(BufferType type, u64 count) { MappedBuffer!(T) buf; - CreateBuffer(vk, &buf.base, type, T.sizeof * count, true); - buf.data = MapBuffer!(T)(vk, &buf.base, count); + CreateBuffer(&buf.base, type, T.sizeof * count, 
true); + buf.data = MapBuffer!(T)(&buf.base, count); return buf; } T[] -MapBuffer(T)(Vulkan* vk, Buffer* buffer, u64 count) +MapBuffer(T)(Buffer* buffer, u64 count) { void* ptr; - vmaMapMemory(vk.vma, buffer.alloc, &ptr); + vmaMapMemory(g_vk.vma, buffer.alloc, &ptr); return (cast(T*)ptr)[0 .. count]; } -VkImage -CurrentImage(Vulkan* vk) -{ - return vk.present_images[vk.image_index].image; -} - void -BeginFrame(Vulkan* vk) +BeginFrame() { // TODO: move vkWaitForFences so it no longer holds up the frame, will need to change how fences are handled in regards to images though - VkResult result = vkWaitForFences(vk.device, 1, vk.render_fences.ptr + vk.frame_index, VK_TRUE, 1000000000); + VkResult result = vkWaitForFences(g_vk.device, 1, g_vk.render_fence_ptr, VK_TRUE, 1000000000); VkCheckA("BeginFrame failure: vkWaitForFences error", result); - result = vkResetFences(vk.device, 1, vk.render_fences.ptr + vk.frame_index); + result = vkResetFences(g_vk.device, 1, g_vk.render_fence_ptr); VkCheckA("BeginFrame failure: vkResetFences error", result); + // ?? 
dont know why this is 3 (should have commented it) for(u64 i = 0; i < 3; i += 1) { - result = vkAcquireNextImageKHR(vk.device, vk.swapchain, 1000000000, vk.acquire_sems[vk.frame_index], null, &vk.image_index); - if(result == VK_ERROR_OUT_OF_DATE_KHR || vk.recreate_swapchain) + result = vkAcquireNextImageKHR(g_vk.device, g_vk.swapchain, 1000000000, g_vk.acquire_sem, null, &g_vk.image_index); + if(result == VK_ERROR_OUT_OF_DATE_KHR || g_vk.recreate_swapchain) { - RecreateSwapchain(vk); + RecreateSwapchain(); VkSemaphoreCreateInfo sem_info = { sType: VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO }; - vkDestroySemaphore(vk.device, vk.acquire_sems[vk.frame_index], null); - VkResult sem_res = vkCreateSemaphore(vk.device, &sem_info, null, &vk.acquire_sems[vk.frame_index]); + vkDestroySemaphore(g_vk.device, g_vk.acquire_sem, null); + VkResult sem_res = vkCreateSemaphore(g_vk.device, &sem_info, null, g_vk.acquire_sem_ptr); VkCheckA("Recreating swapchain sem reset failure", sem_res); continue; @@ -719,7 +780,7 @@ BeginFrame(Vulkan* vk) break; } - result = vkResetCommandBuffer(vk.cmds[vk.frame_index], 0); + result = vkResetCommandBuffer(g_vk.cmd, 0); VkCheckA("BeginFrame failure: vkResetCommandBuffer failure", result); VkCommandBufferBeginInfo cmd_info = { @@ -727,113 +788,113 @@ BeginFrame(Vulkan* vk) flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, }; - result = vkBeginCommandBuffer(vk.cmds[vk.frame_index], &cmd_info); + result = vkBeginCommandBuffer(g_vk.cmd, &cmd_info); VkCheckA("BeginFrame failure: vkBeginCommandBuffer error", result); } void -SetClearColors(Vulkan* vk, f32[4] color_clear, f32[4] depth_clear) +SetClearColors(f32[4] color_clear, f32[4] depth_clear) { - vk.color_clear = color_clear; - vk.depth_clear = depth_clear; + g_vk.color_clear = color_clear; + g_vk.depth_clear = depth_clear; } void -BeginRendering(Vulkan* vk) +BeginRendering() { - Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); - 
Transition(vk.cmds[vk.frame_index], &vk.depth_image, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL); + Transition(g_vk.cmd, &g_vk.draw_image, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + Transition(g_vk.cmd, &g_vk.depth_image, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL); - VkImage image = CurrentImage(vk); - Transition(vk.cmds[vk.frame_index], image, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + VkImage image = g_vk.present_image.image; + Transition(g_vk.cmd, image, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); VkClearValue[2] clear_color = [ - { color: { float32: vk.color_clear } }, - { color: { float32: vk.depth_clear } }, + { color: { float32: g_vk.color_clear } }, + { color: { float32: g_vk.depth_clear } }, ]; VkRenderPassBeginInfo pass_info = { sType: VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, - renderPass: vk.render_pass, - framebuffer: vk.framebuffer, + renderPass: g_vk.render_pass, + framebuffer: g_vk.framebuffer, renderArea: { offset: { x: 0, y: 0 }, extent: { - width: vk.swapchain_extent.width, - height: vk.swapchain_extent.height, + width: g_vk.swapchain_extent.width, + height: g_vk.swapchain_extent.height, }, }, clearValueCount: cast(u32)clear_color.length, pClearValues: clear_color.ptr, }; - vkCmdBeginRenderPass(vk.cmds[vk.frame_index], &pass_info, VK_SUBPASS_CONTENTS_INLINE); + vkCmdBeginRenderPass(g_vk.cmd, &pass_info, VK_SUBPASS_CONTENTS_INLINE); } u32[2] -GetExtent(Vulkan* vk) +GetExtent() { u32[2] extent; - extent[0] = vk.swapchain_extent.width; - extent[1] = vk.swapchain_extent.height; + extent[0] = g_vk.swapchain_extent.width; + extent[1] = g_vk.swapchain_extent.height; return extent; } f32 -GetAspect(Vulkan* vk) +GetAspect() { - return cast(f32)(vk.swapchain_extent.width) / cast(f32)(vk.swapchain_extent.height); + return cast(f32)(g_vk.swapchain_extent.width) / cast(f32)(g_vk.swapchain_extent.height); } void -PrepAuxImage(Vulkan* vk, ImageView* view) +PrepAuxImage(ImageView* view) { - 
Transition(vk.cmds[vk.frame_index], view, VK_IMAGE_LAYOUT_GENERAL); + Transition(g_vk.cmd, view, VK_IMAGE_LAYOUT_GENERAL); } void -PrepComputeDrawImage(Vulkan* vk) +PrepComputeDrawImage() { - Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_GENERAL); + Transition(g_vk.cmd, &g_vk.draw_image, VK_IMAGE_LAYOUT_GENERAL); } void -FinishRendering(Vulkan* vk) +FinishRendering() { - vkCmdEndRenderPass(vk.cmds[vk.frame_index]); + vkCmdEndRenderPass(g_vk.cmd); } void -SubmitAndPresent(Vulkan* vk) +SubmitAndPresent() { scope(exit) { - vk.last_pipeline[vk.frame_index] = null; - vk.frame_index = (vk.frame_index + 1) % FRAME_OVERLAP; + g_vk.last_pipeline = null; + g_vk.frame_index = (g_vk.frame_index + 1) % FRAME_OVERLAP; } - VkImage image = CurrentImage(vk); - VkSemaphore acquire_sem = vk.acquire_sems[vk.frame_index]; - VkSemaphore submit_sem = vk.submit_sems[vk.image_index]; + VkImage image = g_vk.present_image.image; + VkSemaphore acquire_sem = g_vk.acquire_sem; + VkSemaphore submit_sem = g_vk.submit_sem; - Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL); + Transition(g_vk.cmd, &g_vk.draw_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL); VkExtent2D extent = { - width: vk.swapchain_extent.width, - height: vk.swapchain_extent.height, + width: g_vk.swapchain_extent.width, + height: g_vk.swapchain_extent.height, }; // TODO: Find out how to copy from same dimension images (pretty sure its not blitting) - Copy(vk.cmds[vk.frame_index], &vk.draw_image.view.base, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, extent, extent); + Copy(g_vk.cmd, &g_vk.draw_image.view.base, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, extent, extent); - Transition(vk.cmds[vk.frame_index], image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); + Transition(g_vk.cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - VkResult result = vkEndCommandBuffer(vk.cmds[vk.frame_index]); + VkResult result = 
vkEndCommandBuffer(g_vk.cmd); VkCheckA("FinishFrame failure: vkEndCommandBuffer error", result); VkCommandBufferSubmitInfo cmd_info = { sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, - commandBuffer: vk.cmds[vk.frame_index], + commandBuffer: g_vk.cmd, }; VkSemaphoreSubmitInfo wait_info = { @@ -860,22 +921,22 @@ SubmitAndPresent(Vulkan* vk) pCommandBufferInfos: &cmd_info, }; - result = vkQueueSubmit2(vk.queues.gfx_queue, 1, &submit_info, vk.render_fences[vk.frame_index]); + result = vkQueueSubmit2(g_vk.queues.gfx_queue, 1, &submit_info, g_vk.render_fence); VkCheckA("FinishFrame failure: vkQueueSubmit2 error", result); VkPresentInfoKHR present_info = { sType: VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, swapchainCount: 1, - pSwapchains: &vk.swapchain, + pSwapchains: &g_vk.swapchain, waitSemaphoreCount: 1, pWaitSemaphores: &submit_sem, - pImageIndices: &vk.image_index, + pImageIndices: &g_vk.image_index, }; - result = vkQueuePresentKHR(vk.queues.gfx_queue, &present_info); - if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR || vk.recreate_swapchain) + result = vkQueuePresentKHR(g_vk.queues.gfx_queue, &present_info); + if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR || g_vk.recreate_swapchain) { - RecreateSwapchain(vk); + RecreateSwapchain(); } else { @@ -884,32 +945,32 @@ SubmitAndPresent(Vulkan* vk) } void -Draw(Vulkan* vk, u32 index_count, u32 instance_count) +Draw(u32 index_count, u32 instance_count) { - vkCmdDraw(vk.cmds[vk.frame_index], index_count, instance_count, 0, 0); + vkCmdDraw(g_vk.cmd, index_count, instance_count, 0, 0); } void -DrawIndexed(Vulkan* vk, u32 idx_count, u32 instance_count, u32 idx_offset, i32 vtx_offset, u32 first_instance) +DrawIndexed(u32 idx_count, u32 instance_count, u32 idx_offset, i32 vtx_offset, u32 first_instance) { - vkCmdDrawIndexed(vk.cmds[vk.frame_index], idx_count, instance_count, idx_offset, vtx_offset, first_instance); + vkCmdDrawIndexed(g_vk.cmd, idx_count, instance_count, idx_offset, 
vtx_offset, first_instance); } bool -ImmSubmitStart(Vulkan* vk) +ImmSubmitStart() { - VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999); + VkResult result = vkWaitForFences(g_vk.device, 1, &g_vk.imm_fence, true, 999999999); bool success = VkCheck("ImmSubmit failure: vkWaitForFences error", result); if(success) { - result = vkResetFences(vk.device, 1, &vk.imm_fence); + result = vkResetFences(g_vk.device, 1, &g_vk.imm_fence); success = VkCheck("ImmSubmit failure: vkResetFences error", result); } if(success) { - result = vkResetCommandBuffer(vk.imm_cmd, 0); + result = vkResetCommandBuffer(g_vk.imm_cmd, 0); success = VkCheck("ImmSubmit failure: vkResetCommandBuffer error", result); } @@ -920,7 +981,7 @@ ImmSubmitStart(Vulkan* vk) flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, }; - result = vkBeginCommandBuffer(vk.imm_cmd, &cmd_info); + result = vkBeginCommandBuffer(g_vk.imm_cmd, &cmd_info); success = VkCheck("ImmSubmit failure: vkBeginCommandBuffer error", result); } @@ -928,11 +989,11 @@ ImmSubmitStart(Vulkan* vk) } bool -ImmSubmitFinish(Vulkan* vk) +ImmSubmitFinish() { VkCommandBufferSubmitInfo cmd_info = { sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, - commandBuffer: vk.imm_cmd, + commandBuffer: g_vk.imm_cmd, }; VkSubmitInfo2 submit_info = { @@ -941,221 +1002,221 @@ ImmSubmitFinish(Vulkan* vk) pCommandBufferInfos: &cmd_info, }; - VkResult result = vkQueueSubmit2(vk.tfer_queue, 1, &submit_info, vk.imm_fence); + VkResult result = vkQueueSubmit2(g_vk.tfer_queue, 1, &submit_info, g_vk.imm_fence); return VkCheck("ImmSubmit failure: vkQueueSubmit2 error", result); } bool -ImmSubmit(Vulkan* vk, Image* image, VkBufferImageCopy copy, void function(Vulkan*, Image*, VkBufferImageCopy) fn) +ImmSubmit(Image* image, VkBufferImageCopy copy, void function(Image*, VkBufferImageCopy) fn) { - bool success = ImmSubmitStart(vk); + bool success = ImmSubmitStart(); if(success) { - fn(vk, image, copy); + fn(image, copy); - VkResult result = 
vkEndCommandBuffer(vk.imm_cmd); + VkResult result = vkEndCommandBuffer(g_vk.imm_cmd); success = VkCheck("ImmSubmit failure: vkEndCommandBuffer error", result); } if(success) { - success = ImmSubmitFinish(vk); + success = ImmSubmitFinish(); } return success; } bool -ImmSubmit(Vulkan* vk, Buffer* buf, VkBufferCopy copy, void function(Vulkan*, Buffer*, VkBufferCopy) fn) +ImmSubmit(Buffer* buf, VkBufferCopy copy, void function(Buffer*, VkBufferCopy) fn) { - bool success = ImmSubmitStart(vk); + bool success = ImmSubmitStart(); if(success) { - fn(vk, buf, copy); + fn(buf, copy); - VkResult result = vkEndCommandBuffer(vk.imm_cmd); + VkResult result = vkEndCommandBuffer(g_vk.imm_cmd); success = VkCheck("ImmSubmit failure: vkEndCommandBuffer error", result); } if(success) { - success = ImmSubmitFinish(vk); + success = ImmSubmitFinish(); } return success; } bool -TransferAssets(Vulkan* vk) +TransferAssets() { return true; } void -WaitForTransfers(Vulkan* vk) +WaitForTransfers() { - vkWaitForFences(vk.device, 1, &vk.imm_fence, VK_TRUE, u64.max); + vkWaitForFences(g_vk.device, 1, &g_vk.imm_fence, VK_TRUE, u64.max); } void -CreateImageView(Vulkan* vk, Descriptor* view, u32 w, u32 h, u32 ch, u8[] data, DescType type = DT.Image, u32 binding, u32 index) +CreateImageView(Descriptor* view, u32 w, u32 h, u32 ch, u8[] data, DescType type = DT.Image, u32 binding, u32 index) { view.array_elem = true; view.index = index; - CreateImageView(vk, view, w, h, ch, data, type, binding); + CreateImageView(view, w, h, ch, data, type, binding); } void -CreateImageView(Vulkan* vk, Descriptor* view, u32 w, u32 h, u32 ch, u8[] data, DescType type = DT.Image, u32 binding) +CreateImageView(Descriptor* view, u32 w, u32 h, u32 ch, u8[] data, DescType type = DT.Image, u32 binding) { ImageDescCheck(type); view.type = type; view.binding = binding; - CreateImageView(vk, &view.view, w, h, ch, data); + CreateImageView(&view.view, w, h, ch, data); } void -CreateImageViewTex(Vulkan* vk, Descriptor* desc, u32 w, 
u32 h, DescType type = DT.Image, u32 binding, u32 index) +CreateImageViewTex(Descriptor* desc, u32 w, u32 h, DescType type = DT.Image, u32 binding, u32 index) { desc.array_elem = true; desc.index = index; - CreateImageViewTex(vk, desc, w, h, type, binding); + CreateImageViewTex(desc, w, h, type, binding); } void -CreateImageViewTex(Vulkan* vk, Descriptor* desc, u32 w, u32 h, DescType type = DT.Image, u32 binding) +CreateImageViewTex(Descriptor* desc, u32 w, u32 h, DescType type = DT.Image, u32 binding) { ImageDescCheck(type); desc.type = type; desc.binding = binding; - CreateImageView(vk, &desc.view, w, h, FMT.RGBA_UNORM, IU.Texture); + CreateImageView(&desc.view, w, h, FMT.RGBA_UNORM, IU.Texture); } void -CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, u32 ch, u8[] data) +CreateImageView(ImageView* view, u32 w, u32 h, u32 ch, u8[] data) { - CreateImageView(vk, view, w, h, FMT.RGBA_UNORM, IU.Texture); + CreateImageView(view, w, h, FMT.RGBA_UNORM, IU.Texture); if(ch == 4) { - bool result = Transfer(vk, view, data, w, h); + bool result = Transfer(view, data, w, h); assert(result); } else { Descriptor buf; - CreateBuffer(vk, &buf, BT.Storage, w * h * ch, false, DT.Storage, 1); - bool result = Transfer(vk, &buf.buf, data); + CreateBuffer(&buf, BT.Storage, w * h * ch, false, DT.Storage, 1); + bool result = Transfer(&buf.buf, data); assert(result, "CreateImageView failure: Buffer Transfer error"); Descriptor conv_view; - CreateImageView(vk, &conv_view, w, h, FMT.RGBA_F32, IU.Convert, DT.StorageImage, 0); + CreateImageView(&conv_view, w, h, FMT.RGBA_F32, IU.Convert, DT.StorageImage, 0); - Write(vk, vk.conv_desc_set, [conv_view, buf]); + Write(g_vk.conv_desc_set, [conv_view, buf]); - BeginComputePass(vk); + BeginComputePass(); - Pipeline pipeline = ch == 1 ? vk.r_to_rgba_pipeline : - ch == 2 ? vk.rg_to_rgba_pipeline : - vk.rgb_to_rgba_pipeline; + Pipeline pipeline = ch == 1 ? g_vk.r_to_rgba_pipeline : + ch == 2 ? 
g_vk.rg_to_rgba_pipeline : + g_vk.rgb_to_rgba_pipeline; - Bind(vk, pipeline, vk.conv_desc_set, true); + Bind(pipeline, g_vk.conv_desc_set, true); ConvPushConst pc = { x: w, y: h }; vkCmdPushConstants( - vk.comp_cmd, - vk.conv_pipeline_layout, + g_vk.comp_cmd, + g_vk.conv_pipeline_layout, VK_SHADER_STAGE_COMPUTE_BIT, 0, ConvPushConst.sizeof, &pc ); - Transition(vk.comp_cmd, &conv_view, VK_IMAGE_LAYOUT_GENERAL); + Transition(g_vk.comp_cmd, &conv_view, VK_IMAGE_LAYOUT_GENERAL); - Dispatch(vk, vk.comp_cmd); + Dispatch(g_vk.comp_cmd); - Transition(vk.comp_cmd, view, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + Transition(g_vk.comp_cmd, view, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); VkExtent2D extent = { width: w, height: h }; - Copy(vk.comp_cmd, &conv_view.view.base, &view.base, extent, extent); + Copy(g_vk.comp_cmd, &conv_view.view.base, &view.base, extent, extent); - Transition(vk.comp_cmd, view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + Transition(g_vk.comp_cmd, view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - FinishComputePass(vk); + FinishComputePass(); - vkWaitForFences(vk.device, 1, &vk.comp_fence, VK_TRUE, u64.max); + vkWaitForFences(g_vk.device, 1, &g_vk.comp_fence, VK_TRUE, u64.max); - vkQueueWaitIdle(vk.tfer_queue); + vkQueueWaitIdle(g_vk.tfer_queue); - Destroy(vk, &buf); - Destroy(vk, &conv_view); + Destroy(&buf); + Destroy(&conv_view); } } void -CreateImageView(Vulkan* vk, Descriptor* desc, u32 w, u32 h, Format format, ImageUsage usage, DescType type, u32 binding, u32 index) +CreateImageView(Descriptor* desc, u32 w, u32 h, Format format, ImageUsage usage, DescType type, u32 binding, u32 index) { desc.array_elem = true; desc.index = index; - CreateImageView(vk, desc, w, h, format, usage, type, binding); + CreateImageView(desc, w, h, format, usage, type, binding); } void -CreateImageView(Vulkan* vk, Descriptor* desc, u32 w, u32 h, Format format, ImageUsage usage, DescType type, u32 binding) +CreateImageView(Descriptor* desc, u32 w, u32 h, Format format, 
ImageUsage usage, DescType type, u32 binding) { ImageDescCheck(type); desc.type = type; desc.binding = binding; - CreateImageView(vk, &desc.view, w, h, format, usage); + CreateImageView(&desc.view, w, h, format, usage); } void -CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format, ImageUsage usage) +CreateImageView(ImageView* view, u32 w, u32 h, Format format, ImageUsage usage) { VmaAllocationCreateInfo alloc_info = { - usage: VMA_MEMORY_USAGE_GPU_ONLY, + usage: VMA_MEMORY_USAGE_GPU_ONLY, requiredFlags: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, }; VkImageCreateInfo image_info = { - sType: VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, - imageType: VK_IMAGE_TYPE_2D, - mipLevels: 1, - arrayLayers: 1, - format: format, - tiling: VK_IMAGE_TILING_OPTIMAL, + sType: VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + imageType: VK_IMAGE_TYPE_2D, + mipLevels: 1, + arrayLayers: 1, + format: format, + tiling: VK_IMAGE_TILING_OPTIMAL, initialLayout: VK_IMAGE_LAYOUT_UNDEFINED, - usage: usage, - samples: VK_SAMPLE_COUNT_1_BIT, + usage: usage, + samples: VK_SAMPLE_COUNT_1_BIT, extent: { - width: w, + width: w, height: h, - depth: 1, + depth: 1, }, }; - u32[2] indices = [vk.gfx_index, vk.tfer_index]; - if(vk.gfx_index != vk.tfer_index) + u32[2] indices = [g_vk.gfx_index, g_vk.tfer_index]; + if(g_vk.gfx_index != g_vk.tfer_index) { - image_info.sharingMode = VK_SHARING_MODE_CONCURRENT; + image_info.sharingMode = VK_SHARING_MODE_CONCURRENT; image_info.queueFamilyIndexCount = 2; - image_info.pQueueFamilyIndices = indices.ptr; + image_info.pQueueFamilyIndices = indices.ptr; } - VkResult result = vmaCreateImage(vk.vma, &image_info, &alloc_info, &view.image, &view.alloc, null); + VkResult result = vmaCreateImage(g_vk.vma, &image_info, &alloc_info, &view.image, &view.alloc, null); // TODO: handle errors and realloc VkCheck("CreateImageView failure: vmaCreateImage error", result); VkImageViewCreateInfo view_info = { - sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, - image: view.image, + sType: 
VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + image: view.image, viewType: VK_IMAGE_VIEW_TYPE_2D, - format: format, + format: format, subresourceRange: { aspectMask: (usage == IU.Depth ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT), levelCount: 1, @@ -1163,16 +1224,16 @@ CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format, ImageU }, }; - result = vkCreateImageView(vk.device, &view_info, null, &view.view); + result = vkCreateImageView(g_vk.device, &view_info, null, &view.view); // TODO: also handle here VkCheck("CreateImageView failure: vkCreateImageView error", result); - view.layout = VK_IMAGE_LAYOUT_UNDEFINED; - view.format = format; - view.w = w; - view.h = h; + view.layout = VK_IMAGE_LAYOUT_UNDEFINED; + view.format = format; + view.w = w; + view.h = h; view.depth_image = usage == IU.Depth; - view.usage = usage; + view.usage = usage; if(usage == IU.Draw || usage == IU.Depth || usage == IU.Convert) { @@ -1186,16 +1247,16 @@ CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format, ImageU } void -BeginComputePass(Vulkan* vk) +BeginComputePass() { - VkResult result = vkWaitForFences(vk.device, 1, &vk.comp_fence, VK_TRUE, 1000000000); + VkResult result = vkWaitForFences(g_vk.device, 1, &g_vk.comp_fence, VK_TRUE, 1000000000); VkCheckA("BeginComputePass failure: vkWaitForFences error", result); - result = vkResetFences(vk.device, 1, &vk.comp_fence); + result = vkResetFences(g_vk.device, 1, &g_vk.comp_fence); VkCheckA("BeginComputepass failure: vkResetFences error", result); - result = vkResetCommandBuffer(vk.comp_cmd, 0); + result = vkResetCommandBuffer(g_vk.comp_cmd, 0); VkCheckA("BeginComputePass failure: vkResetCommandBuffer error", result); VkCommandBufferBeginInfo cmd_info = { @@ -1203,19 +1264,19 @@ BeginComputePass(Vulkan* vk) flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, }; - result = vkBeginCommandBuffer(vk.comp_cmd, &cmd_info); + result = vkBeginCommandBuffer(g_vk.comp_cmd, &cmd_info); 
VkCheckA("BeginComputePass failure: vkBeginCommandBuffer error", result); } void -FinishComputePass(Vulkan* vk) +FinishComputePass() { - VkResult result = vkEndCommandBuffer(vk.comp_cmd); + VkResult result = vkEndCommandBuffer(g_vk.comp_cmd); VkCheckA("FinishComputePass failure: vkEndCommandBuffer error", result); VkCommandBufferSubmitInfo cmd_info = { sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, - commandBuffer: vk.comp_cmd, + commandBuffer: g_vk.comp_cmd, }; VkSubmitInfo2 submit_info = { @@ -1224,14 +1285,14 @@ FinishComputePass(Vulkan* vk) pCommandBufferInfos: &cmd_info, }; - result = vkQueueSubmit2(vk.gfx_queue, 1, &submit_info, vk.comp_fence); + result = vkQueueSubmit2(g_vk.gfx_queue, 1, &submit_info, g_vk.comp_fence); VkCheckA("FinishComputePass failure: vkQueueSubmit2 error", result); } void -CreateBufferView(Vulkan* vk, Descriptor* desc, u64 size, Format format, DescType type, u32 binding) +CreateBufferView(Descriptor* desc, u64 size, Format format, DescType type, u32 binding) { - CreateBuffer(vk, &desc.buf_view.base, BT.BufferView, size, false); + CreateBuffer(&desc.buf_view.base, BT.BufferView, size, false); VkBufferViewCreateInfo info = { sType: VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, @@ -1240,7 +1301,7 @@ CreateBufferView(Vulkan* vk, Descriptor* desc, u64 size, Format format, DescType range: size, }; - VkResult result = vkCreateBufferView(vk.device, &info, null, &desc.buf_view.view); + VkResult result = vkCreateBufferView(g_vk.device, &info, null, &desc.buf_view.view); VkCheckA("CreateBufferView failure: vkCreateBufferView failed", result); desc.binding = binding; @@ -1248,16 +1309,16 @@ CreateBufferView(Vulkan* vk, Descriptor* desc, u64 size, Format format, DescType } void -PushConstants(T)(Vulkan* vk, Pipeline pipeline_id, T* pc) +PushConstants(T)(Pipeline pipeline_id, T* pc) { assert(pipeline_id > 0, "PushConstants pipeline_id == 0"); - PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_id; + PipelineHandles* pipeline = 
g_vk.pipeline_handles.ptr + pipeline_id; VkShaderStageFlags stage = (pipeline.type == VK_PIPELINE_BIND_POINT_GRAPHICS ? VK_SHADER_STAGE_VERTEX_BIT|VK_SHADER_STAGE_FRAGMENT_BIT : VK_SHADER_STAGE_COMPUTE_BIT); vkCmdPushConstants( - vk.cmds[vk.frame_index], + g_vk.cmd, pipeline.layout, stage, 0, @@ -1267,7 +1328,7 @@ PushConstants(T)(Vulkan* vk, Pipeline pipeline_id, T* pc) } void -ImageBarrier(Vulkan* vk) +ImageBarrier() { VkMemoryBarrier2 barrier = { sType: VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, @@ -1284,42 +1345,42 @@ ImageBarrier(Vulkan* vk) pMemoryBarriers: &barrier, }; - vkCmdPipelineBarrier2(vk.cmds[vk.frame_index], &dependency); + vkCmdPipelineBarrier2(g_vk.cmd, &dependency); } bool -Transfer(T)(Vulkan* vk, Buffer* buf, T[] data) +Transfer(T)(Buffer* buf, T[] data) { u8[] u8_data = (cast(u8*)(data.ptr))[0 .. T.sizeof * data.length]; - return Transfer(vk, buf, u8_data); + return Transfer(buf, u8_data); } bool -Transfer(Vulkan* vk, Buffer* buf, u8[] data) +Transfer(Buffer* buf, u8[] data) { - bool success = TransferReady(vk); + bool success = TransferReady(); u64 copied = 0; while(copied != data.length && success) { if(copied != 0) { - success = TransferReady(vk); + success = TransferReady(); if(!success) { break; } } - u64 transfer_length = cast(u64)vk.transfer_buf.data.length; + u64 transfer_length = cast(u64)g_vk.transfer_buf.data.length; u64 data_length = cast(u64)data.length - copied; u64 copy_length = transfer_length > data_length ? data_length : transfer_length; - vk.transfer_buf.data[0 .. copy_length] = data[copied .. copy_length]; + g_vk.transfer_buf.data[0 .. copy_length] = data[copied .. 
copy_length]; - auto fn = function(Vulkan* vk, Buffer* buf, VkBufferCopy copy) + auto fn = function(Buffer* buf, VkBufferCopy copy) { - vkCmdCopyBuffer(vk.imm_cmd, vk.transfer_buf.buffer, buf.buffer, 1, &copy); + vkCmdCopyBuffer(g_vk.imm_cmd, g_vk.transfer_buf.buffer, buf.buffer, 1, &copy); }; VkBufferCopy copy = { @@ -1328,29 +1389,29 @@ Transfer(Vulkan* vk, Buffer* buf, u8[] data) size: copy_length, }; - success = ImmSubmit(vk, buf, copy, fn); + success = ImmSubmit(buf, copy, fn); copied += copy_length; } - WaitForTransfers(vk); + WaitForTransfers(); return success; } bool -Transfer(T)(Vulkan* vk, Buffer* buf, T* ptr) +Transfer(T)(Buffer* buf, T* ptr) { - assert(T.sizeof < vk.transfer_buf.data.length, "Transfer failure: structure size is too large"); + assert(T.sizeof < g_vk.transfer_buf.data.length, "Transfer failure: structure size is too large"); - bool success = TransferReady(vk); + bool success = TransferReady(); if(success) { - memcpy(vk.transfer_buf.data.ptr, ptr, T.sizeof); + memcpy(g_vk.transfer_buf.data.ptr, ptr, T.sizeof); - auto fn = function(Vulkan* vk, Buffer* buf, VkBufferCopy copy) + auto fn = function(Buffer* buf, VkBufferCopy copy) { - vkCmdCopyBuffer(vk.imm_cmd, vk.transfer_buf.buffer, buf.buffer, 1, &copy); + vkCmdCopyBuffer(g_vk.imm_cmd, g_vk.transfer_buf.buffer, buf.buffer, 1, &copy); }; VkBufferCopy copy = { @@ -1359,49 +1420,49 @@ Transfer(T)(Vulkan* vk, Buffer* buf, T* ptr) size: T.sizeof, }; - success = ImmSubmit(vk, buf, copy, fn); + success = ImmSubmit(buf, copy, fn); } - WaitForTransfers(vk); + WaitForTransfers(); return success; } // Needs to be done before writing to the transfer buffer as otherwise it'll overwrite it while previous transfers are occurring bool -TransferReady(Vulkan* vk) +TransferReady() { - VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999); + VkResult result = vkWaitForFences(g_vk.device, 1, &g_vk.imm_fence, true, 999999999); return VkCheck("Transfer failure: vkWaitForFences error", result); } bool
-Transfer(Vulkan* vk, ImageView* view, u8[] data, u32 w, u32 h) +Transfer(ImageView* view, u8[] data, u32 w, u32 h) { - return Transfer(vk, &view.base, data, w, h); + return Transfer(&view.base, data, w, h); } bool -Transfer(Vulkan* vk, Image* image, u8[] data, u32 w, u32 h) +Transfer(Image* image, u8[] data, u32 w, u32 h) { bool success = true; u64 copied = 0; while(copied != data.length) { - u64 transfer_length = cast(u64)vk.transfer_buf.data.length; + u64 transfer_length = cast(u64)g_vk.transfer_buf.data.length; u64 data_length = cast(u64)data.length - copied; u64 copy_length = transfer_length > data_length ? data_length : transfer_length; - vk.transfer_buf.data[0 .. copy_length] = data[copied .. copy_length]; + g_vk.transfer_buf.data[0 .. copy_length] = data[copied .. copy_length]; - auto fn = function(Vulkan* vk, Image* image, VkBufferImageCopy copy) + auto fn = function(Image* image, VkBufferImageCopy copy) { - Transition(vk.imm_cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + Transition(g_vk.imm_cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); - vkCmdCopyBufferToImage(vk.imm_cmd, vk.transfer_buf.buffer, image.image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy); + vkCmdCopyBufferToImage(g_vk.imm_cmd, g_vk.transfer_buf.buffer, image.image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy); - Transition(vk.imm_cmd, image, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + Transition(g_vk.imm_cmd, image, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); }; VkBufferImageCopy copy = { @@ -1419,12 +1480,12 @@ Transfer(Vulkan* vk, Image* image, u8[] data, u32 w, u32 h) bufferOffset: copied, }; - success = ImmSubmit(vk, image, copy, fn); + success = ImmSubmit(image, copy, fn); copied += copy_length; } - WaitForTransfers(vk); + WaitForTransfers(); return success; } @@ -1480,18 +1541,18 @@ Copy(VkCommandBuffer cmd, VkImage src, VkImage dst, VkImageLayout src_layout, Vk } void -Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false) +Bind(Pipeline 
pipeline_handle, DescSet[] sets, bool compute = false) { assert(pipeline_handle > 0, "Bind failure: pipeline is 0"); - VkCommandBuffer cmd = (compute ? vk.comp_cmd : vk.cmds[vk.frame_index]); - PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_handle; - BindPipeline(vk, cmd, pipeline); + VkCommandBuffer cmd = (compute ? g_vk.comp_cmd : g_vk.cmd); + PipelineHandles* pipeline = g_vk.pipeline_handles.ptr + pipeline_handle; + BindPipeline(cmd, pipeline); u32[] offsets; - if(vk.global_set.dynamic_count > 0) + if(g_vk.global_set.dynamic_count > 0) { - offsets = Alloc!(u32)(&vk.frame_arenas[vk.frame_index], vk.global_set.dynamic_count); + offsets = Alloc!(u32)(g_vk.FrameArena(), g_vk.global_set.dynamic_count); } vkCmdBindDescriptorSets( @@ -1500,13 +1561,13 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false) pipeline.layout, 0, 1, - &vk.global_set.handle, + &g_vk.global_set.handle, cast(u32)offsets.length, offsets.ptr ); u32 offset_count; - VkDescriptorSet[] handles = Alloc!(VkDescriptorSet)(&vk.frame_arenas[vk.frame_index], sets.length); + VkDescriptorSet[] handles = Alloc!(VkDescriptorSet)(g_vk.FrameArena(), sets.length); foreach(i, set; sets) { @@ -1516,7 +1577,7 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false) if(offset_count > 0) { - offsets = Alloc!(u32)(&vk.frame_arenas[vk.frame_index], offset_count); + offsets = Alloc!(u32)(g_vk.FrameArena(), offset_count); } vkCmdBindDescriptorSets( @@ -1532,28 +1593,28 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false) } PipelineHandles* -NewPipeline(Vulkan* vk) +NewPipeline() { - PipelineHandles* pipeline = vk.pipeline_handles.ptr + vk.pipeline_count; - pipeline.index = vk.pipeline_count; - vk.pipeline_count += 1; + PipelineHandles* pipeline = g_vk.pipeline_handles.ptr + g_vk.pipeline_count; + pipeline.index = g_vk.pipeline_count; + g_vk.pipeline_count += 1; return pipeline; } void -Bind(Vulkan* vk, Pipeline 
pipeline_handle, DescSet set, bool compute = false) +Bind(Pipeline pipeline_handle, DescSet set, bool compute = false) { assert(pipeline_handle > 0, "Bind failure: pipeline is 0"); - VkCommandBuffer cmd = (compute ? vk.comp_cmd : vk.cmds[vk.frame_index]); - PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_handle; - BindPipeline(vk, cmd, pipeline); + VkCommandBuffer cmd = (compute ? g_vk.comp_cmd : g_vk.cmd); + PipelineHandles* pipeline = g_vk.pipeline_handles.ptr + pipeline_handle; + BindPipeline(cmd, pipeline); u32[] offsets; - if(vk.global_set.dynamic_count > 0) + if(g_vk.global_set.dynamic_count > 0) { - offsets = Alloc!(u32)(&vk.frame_arenas[vk.frame_index], vk.global_set.dynamic_count); + offsets = Alloc!(u32)(g_vk.FrameArena(), g_vk.global_set.dynamic_count); } vkCmdBindDescriptorSets( @@ -1562,14 +1623,14 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set, bool compute = false) pipeline.layout, 0, 1, - &vk.global_set.handle, + &g_vk.global_set.handle, cast(u32)offsets.length, offsets.ptr ); if(set.dynamic_count > 0) { - offsets = Alloc!(u32)(&vk.frame_arenas[vk.frame_index], set.dynamic_count); + offsets = Alloc!(u32)(g_vk.FrameArena(), set.dynamic_count); } vkCmdBindDescriptorSets( @@ -1585,51 +1646,51 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set, bool compute = false) } void -SetExtent(Vulkan* vk, u32 x, u32 y) +SetExtent(u32 x, u32 y) { - vk.recreate_swapchain = true; - vk.res = [x, y]; + g_vk.recreate_swapchain = true; + g_vk.res = [x, y]; } void -BindPipeline(Vulkan* vk, VkCommandBuffer cmd, PipelineHandles* pipeline) +BindPipeline(VkCommandBuffer cmd, PipelineHandles* pipeline) { vkCmdBindPipeline(cmd, pipeline.type, pipeline.handle); - vk.last_pipeline[vk.frame_index] = pipeline.handle; + g_vk.last_pipeline = pipeline.handle; VkViewport viewport = { x: 0.0, y: 0.0, - width: cast(f32)vk.swapchain_extent.width, - height: cast(f32)vk.swapchain_extent.height, + width: cast(f32)g_vk.swapchain_extent.width, + height: 
cast(f32)g_vk.swapchain_extent.height, minDepth: 0.0, maxDepth: 1.0, }; vkCmdSetViewport(cmd, 0, 1, &viewport); - ResetScissor(vk, cmd); + ResetScissor(cmd); } void -SetScissor(Vulkan* vk, u32 x, u32 y, u32 w, u32 h) +SetScissor(u32 x, u32 y, u32 w, u32 h) { VkRect2D scissor = { offset: { x: x, y: y }, extent: { width: w, height: h }, }; - vkCmdSetScissor(vk.cmds[vk.frame_index], 0, 1, &scissor); + vkCmdSetScissor(g_vk.cmd, 0, 1, &scissor); } void -ResetScissor(Vulkan* vk, VkCommandBuffer cmd = cast(VkCommandBuffer)VK_NULL_HANDLE) +ResetScissor(VkCommandBuffer cmd = cast(VkCommandBuffer)VK_NULL_HANDLE) { - cmd = cmd == VK_NULL_HANDLE ? vk.cmds[vk.frame_index] : cmd; + cmd = cmd == VK_NULL_HANDLE ? g_vk.cmd : cmd; VkRect2D scissor = { offset: { x: 0, y: 0 }, - extent: { width: vk.swapchain_extent.width, height: vk.swapchain_extent.height }, + extent: { width: g_vk.swapchain_extent.width, height: g_vk.swapchain_extent.height }, }; vkCmdSetScissor(cmd, 0, 1, &scissor); @@ -1710,7 +1771,7 @@ Transition(VkCommandBuffer cmd, Image* image, VkImageLayout new_layout) } bool -BuildShader(Vulkan* vk, Shader* shader, u8[] bytes) +BuildShader(Shader* shader, u8[] bytes) { VkShaderModuleCreateInfo shader_info = { sType: VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, @@ -1718,14 +1779,14 @@ BuildShader(Vulkan* vk, Shader* shader, u8[] bytes) pCode: cast(uint*)bytes.ptr, }; - VkResult result = vkCreateShaderModule(vk.device, &shader_info, null, shader); + VkResult result = vkCreateShaderModule(g_vk.device, &shader_info, null, shader); return VkCheck("vkCreateShaderModule failure", result); } bool -CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* build_info) +CreateGraphicsPipeline(Pipeline* pipeline_handle, GfxPipelineInfo* build_info) { - PipelineHandles* pipeline = NewPipeline(vk); + PipelineHandles* pipeline = NewPipeline(); pipeline.type = VK_PIPELINE_BIND_POINT_GRAPHICS; pipeline.layout = build_info.layout; @@ -1778,8 +1839,8 @@ 
CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b VkPipelineRenderingCreateInfo rendering_info = { sType: VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, colorAttachmentCount: 1, - pColorAttachmentFormats: cast(VkFormat*)&vk.draw_image.view.format, - depthAttachmentFormat: vk.depth_image.view.format, + pColorAttachmentFormats: cast(VkFormat*)&g_vk.draw_image.view.format, + depthAttachmentFormat: g_vk.depth_image.view.format, }; VkPipelineColorBlendAttachmentState blend_state = { @@ -1834,18 +1895,18 @@ CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b }, ]; - Arena* arena = &vk.frame_arenas[0]; + Arena* arena = &g_vk.frame_arenas[0]; bool success = true; Shader frag_module, vert_module; - success &= BuildShader(vk, &frag_module, build_info.frag_shader); - success &= BuildShader(vk, &vert_module, build_info.vertex_shader); + success &= BuildShader(&frag_module, build_info.frag_shader); + success &= BuildShader(&vert_module, build_info.vertex_shader); scope(exit) { - Destroy(vk, frag_module); - Destroy(vk, vert_module); + Destroy(frag_module); + Destroy(vert_module); } if(success) @@ -1890,11 +1951,11 @@ CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b pDynamicState: &dyn_info, stageCount: cast(u32)shader_info.length, pStages: shader_info.ptr, - renderPass: vk.render_pass, + renderPass: g_vk.render_pass, layout: build_info.layout, }; - VkResult result = vkCreateGraphicsPipelines(vk.device, null, 1, &create_info, null, &pipeline.handle); + VkResult result = vkCreateGraphicsPipelines(g_vk.device, null, 1, &create_info, null, &pipeline.handle); success = VkCheck("CreateGraphicsPipeline failure", result); *pipeline_handle = pipeline.index; @@ -1904,7 +1965,7 @@ CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b } Pipeline -CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info) +CreateComputePipeline(CompPipelineInfo* comp_info) { 
VkComputePipelineCreateInfo info = { sType: VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, @@ -1917,8 +1978,8 @@ CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info) }; Shader comp_module; - BuildShader(vk, &comp_module, comp_info.shader); - scope(exit) Destroy(vk, comp_module); + BuildShader(&comp_module, comp_info.shader); + scope(exit) Destroy(comp_module); __traits(getMember, &info.stage, "module") = comp_module; @@ -1934,61 +1995,61 @@ CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info) info.stage.pSpecializationInfo = &spec_info; } - PipelineHandles* pipeline = NewPipeline(vk); + PipelineHandles* pipeline = NewPipeline(); pipeline.type = VK_PIPELINE_BIND_POINT_COMPUTE; pipeline.layout = comp_info.layout; Pipeline pipeline_handle = pipeline.index; - VkResult result = vkCreateComputePipelines(vk.device, null, 1, &info, null, &pipeline.handle); + VkResult result = vkCreateComputePipelines(g_vk.device, null, 1, &info, null, &pipeline.handle); VkCheck("CreateComputePipeline failure", result); return pipeline_handle; } void -ClearDepth(T)(Vulkan* vk, T color) +ClearDepth(T)(T color) { static if(is(T.v : f32[4])) { - ClearDepth(vk, color.v); + ClearDepth(color.v); } else static assert(false, "Invalid type for clear depth"); } void -ClearColor(T)(Vulkan* vk, T color) +ClearColor(T)(T color) { static if(is(T.v : f32[4])) { - ClearColor(vk, color.v); + ClearColor(color.v); } else static assert(false, "Invalid type for clear depth"); } void -ClearDepth(Vulkan* vk, f32[4] color = [0.0, 0.0, 0.0, 0.0]) +ClearDepth(f32[4] color = [0.0, 0.0, 0.0, 0.0]) { - Transition(vk.cmds[vk.frame_index], &vk.depth_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); - ClearColor(vk, &vk.depth_image, color); + Transition(g_vk.cmd, &g_vk.depth_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + ClearColor(&g_vk.depth_image, color); } void -ClearColor(Vulkan* vk, f32[4] color) +ClearColor(f32[4] color) { - Transition(vk.cmds[vk.frame_index], &vk.draw_image, 
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); - ClearColor(vk, &vk.draw_image, color); + Transition(g_vk.cmd, &g_vk.draw_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + ClearColor(&g_vk.draw_image, color); } void -ClearColor(Vulkan* vk, Descriptor* desc, f32[4] color) +ClearColor(Descriptor* desc, f32[4] color) { ImageDescCheck(desc); - ClearColor(vk, &desc.view, color); + ClearColor(&desc.view, color); } void -ClearColor(Vulkan* vk, ImageView* view, f32[4] color) +ClearColor(ImageView* view, f32[4] color) { VkImageSubresourceRange clear_range = { aspectMask: (view.depth_image ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT), @@ -1997,7 +2058,7 @@ ClearColor(Vulkan* vk, ImageView* view, f32[4] color) }; vkCmdClearColorImage( - vk.cmds[vk.frame_index], + g_vk.cmd, view.image, view.layout, cast(VkClearColorValue*)color, @@ -2007,29 +2068,29 @@ ClearColor(Vulkan* vk, ImageView* view, f32[4] color) } void -WaitIdle(Vulkan* vk) +WaitIdle() { - vkDeviceWaitIdle(vk.device); + vkDeviceWaitIdle(g_vk.device); } void -Destroy(Vulkan* vk, Shader shader) +Destroy(Shader shader) { - vkDestroyShaderModule(vk.device, shader, null); + vkDestroyShaderModule(g_vk.device, shader, null); } void -Destroy(Vulkan* vk, Pipeline pipeline) +Destroy(Pipeline pipeline) { - vkDestroyPipeline(vk.device, vk.pipeline_handles[pipeline].handle, null); + vkDestroyPipeline(g_vk.device, g_vk.pipeline_handles[pipeline].handle, null); } void -DestroyDescriptorPools(Vulkan* vk) +DestroyDescriptorPools() { - vkDestroyDescriptorPool(vk.device, vk.active_pool, null); + vkDestroyDescriptorPool(g_vk.device, g_vk.active_pool, null); - Node!(VkDescriptorPool)* node = vk.full_pools.first; + Node!(VkDescriptorPool)* node = g_vk.full_pools.first; for(;;) { if(node == null) @@ -2037,14 +2098,14 @@ DestroyDescriptorPools(Vulkan* vk) break; } - vkDestroyDescriptorPool(vk.device, node.value, null); + vkDestroyDescriptorPool(g_vk.device, node.value, null); node = node.next; } } void -PushDescriptorPool(Vulkan* vk) 
+PushDescriptorPool() { VkDescriptorPoolSize[8] pool_sizes = [ { type: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, descriptorCount: 4096 }, @@ -2065,24 +2126,24 @@ PushDescriptorPool(Vulkan* vk) }; VkDescriptorPool pool; - VkResult result = vkCreateDescriptorPool(vk.device, &pool_info, null, &pool); + VkResult result = vkCreateDescriptorPool(g_vk.device, &pool_info, null, &pool); bool success = VkCheck("vkCreateDescriptorPool failure", result); assert(success, "vkCreateDescriptorPool error"); - if(vk.active_pool == null || vk.active_pool == VK_NULL_HANDLE) + if(g_vk.active_pool == null || g_vk.active_pool == VK_NULL_HANDLE) { Node!(VkDescriptorPool)* node = Alloc!(Node!(VkDescriptorPool)); - node.value = vk.active_pool; - PushFront(&vk.full_pools, node, null); + node.value = g_vk.active_pool; + PushFront(&g_vk.full_pools, node, null); } - vk.active_pool = pool; + g_vk.active_pool = pool; } void -PrepareWrite(Vulkan* vk, VkWriteDescriptorSet* write, DescSet set, Descriptor* desc) +PrepareWrite(VkWriteDescriptorSet* write, DescSet set, Descriptor* desc) { - Arena* arena = vk.frame_arenas.ptr + vk.frame_index; + Arena* arena = g_vk.FrameArena(); write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; write.descriptorType = desc.type; @@ -2130,31 +2191,31 @@ PrepareWrite(Vulkan* vk, VkWriteDescriptorSet* write, DescSet set, Descriptor* d } void -Write(Vulkan* vk, DescSet set, Descriptor[] descs) +Write(DescSet set, Descriptor[] descs) { - Arena* arena = vk.frame_arenas.ptr + vk.frame_index; + Arena* arena = g_vk.FrameArena(); VkWriteDescriptorSet[] writes = Alloc!(VkWriteDescriptorSet)(arena, descs.length); for(u64 i = 0; i < descs.length; i += 1) { VkWriteDescriptorSet* write = writes.ptr + i; - PrepareWrite(vk, write, set, &descs[i]); + PrepareWrite(write, set, &descs[i]); } - vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null); + vkUpdateDescriptorSets(g_vk.device, cast(u32)writes.length, writes.ptr, 0, null); } void -Write(Vulkan* vk, DescSet set, 
Descriptor* desc) +Write(DescSet set, Descriptor* desc) { VkWriteDescriptorSet write; - PrepareWrite(vk, &write, set, desc); - vkUpdateDescriptorSets(vk.device, 1, &write, 0, null); + PrepareWrite(&write, set, desc); + vkUpdateDescriptorSets(g_vk.device, 1, &write, 0, null); } void -WriteConvDescriptor(Vulkan* vk, Buffer* buf) +WriteConvDescriptor(Buffer* buf) { VkDescriptorBufferInfo buf_info = { buffer: buf.buffer, @@ -2164,18 +2225,18 @@ WriteConvDescriptor(Vulkan* vk, Buffer* buf) VkWriteDescriptorSet write = { sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, - dstSet: vk.conv_desc_set.handle, + dstSet: g_vk.conv_desc_set.handle, dstBinding: 1, descriptorCount: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, pBufferInfo: &buf_info, }; - vkUpdateDescriptorSets(vk.device, 1, &write, 0, null); + vkUpdateDescriptorSets(g_vk.device, 1, &write, 0, null); } void -WriteConvDescriptor(Vulkan* vk, ImageView* view) +WriteConvDescriptor(ImageView* view) { VkDescriptorImageInfo image_info = { imageView: view.view, @@ -2184,28 +2245,28 @@ WriteConvDescriptor(Vulkan* vk, ImageView* view) VkWriteDescriptorSet write = { sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, - dstSet: vk.conv_desc_set.handle, + dstSet: g_vk.conv_desc_set.handle, dstBinding: 0, descriptorCount: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, pImageInfo: &image_info, }; - vkUpdateDescriptorSets(vk.device, 1, &write, 0, null); + vkUpdateDescriptorSets(g_vk.device, 1, &write, 0, null); } void -Dispatch(Vulkan* vk, VkCommandBuffer cmd) +Dispatch(VkCommandBuffer cmd) { - f32 w = Ceil(cast(f32)(vk.swapchain_extent.width) / 16.0F); - f32 h = Ceil(cast(f32)(vk.swapchain_extent.height) / 16.0F); + f32 w = Ceil(cast(f32)(g_vk.swapchain_extent.width) / 16.0F); + f32 h = Ceil(cast(f32)(g_vk.swapchain_extent.height) / 16.0F); vkCmdDispatch(cmd, cast(u32)w, cast(u32)h, 1); } void -Dispatch(Vulkan* vk) +Dispatch() { - Dispatch(vk, vk.cmds[vk.frame_index]); + Dispatch(g_vk.cmd); } bool @@ -2241,14 +2302,14 @@ 
VkCheckA(string message, VkResult result) } Format -GetDrawImageFormat(Vulkan* vk) +GetDrawImageFormat() { VkFormat selected_format; foreach(format; VK_IMAGE_FORMATS) { VkImageFormatProperties props; VkResult result = vkGetPhysicalDeviceImageFormatProperties( - vk.physical_device, + g_vk.physical_device, format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL, @@ -2272,19 +2333,19 @@ GetDrawImageFormat(Vulkan* vk) } void -SelectSwapchainFormats(Vulkan* vk) +SelectSwapchainFormats() { - Arena* arena = &vk.frame_arenas[0]; + Arena* arena = &g_vk.frame_arenas[0]; u32 format_count; - vkGetPhysicalDeviceSurfaceFormatsKHR(vk.physical_device, vk.surface, &format_count, null); + vkGetPhysicalDeviceSurfaceFormatsKHR(g_vk.physical_device, g_vk.surface, &format_count, null); VkSurfaceFormatKHR[] formats = Alloc!(VkSurfaceFormatKHR)(arena, format_count); - vkGetPhysicalDeviceSurfaceFormatsKHR(vk.physical_device, vk.surface, &format_count, formats.ptr); + vkGetPhysicalDeviceSurfaceFormatsKHR(g_vk.physical_device, g_vk.surface, &format_count, formats.ptr); u32 mode_count; - vkGetPhysicalDeviceSurfacePresentModesKHR(vk.physical_device, vk.surface, &mode_count, null); + vkGetPhysicalDeviceSurfacePresentModesKHR(g_vk.physical_device, g_vk.surface, &mode_count, null); VkPresentModeKHR[] modes = Alloc!(VkPresentModeKHR)(arena, mode_count); - vkGetPhysicalDeviceSurfacePresentModesKHR(vk.physical_device, vk.surface, &mode_count, modes.ptr); + vkGetPhysicalDeviceSurfacePresentModesKHR(g_vk.physical_device, g_vk.surface, &mode_count, modes.ptr); VkPresentModeKHR present_mode = VK_PRESENT_MODE_FIFO_KHR; foreach(mode; modes) @@ -2306,15 +2367,15 @@ SelectSwapchainFormats(Vulkan* vk) } } - vk.surface_format = surface_format; - vk.present_mode = present_mode; + g_vk.surface_format = surface_format; + g_vk.present_mode = present_mode; } Descriptor -CreateSampler(Vulkan* vk, MipmapMode mode, u32 binding) +CreateSampler(MipmapMode mode, u32 binding) { VkPhysicalDeviceProperties props; - 
vkGetPhysicalDeviceProperties(vk.physical_device, &props); + vkGetPhysicalDeviceProperties(g_vk.physical_device, &props); VkSamplerCreateInfo sampler_info = { sType: VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, @@ -2335,46 +2396,46 @@ CreateSampler(Vulkan* vk, MipmapMode mode, u32 binding) binding: binding, }; - VkResult result = vkCreateSampler(vk.device, &sampler_info, null, &sampler.sampler); + VkResult result = vkCreateSampler(g_vk.device, &sampler_info, null, &sampler.sampler); VkCheckA("vkCreateSampler failure", result); return sampler; } bool -CreateDrawImages(Vulkan* vk) +CreateDrawImages() { bool success = true; - Format draw_format = cast(Format)GetDrawImageFormat(vk); + Format draw_format = cast(Format)GetDrawImageFormat(); Format depth_format = cast(Format)VK_FORMAT_D32_SFLOAT; - u32 w = vk.swapchain_extent.width; - u32 h = vk.swapchain_extent.height; + u32 w = g_vk.swapchain_extent.width; + u32 h = g_vk.swapchain_extent.height; - CreateImageView(vk, &vk.draw_image, w, h, draw_format, IU.Draw, DT.StorageImage, 0); - CreateImageView(vk, &vk.depth_image, w, h, depth_format, IU.Depth, DT.Image, 0); + CreateImageView(&g_vk.draw_image, w, h, draw_format, IU.Draw, DT.StorageImage, 0); + CreateImageView(&g_vk.depth_image, w, h, depth_format, IU.Depth, DT.Image, 0); return success; } bool -CreateSwapchain(Vulkan* vk) +CreateSwapchain() { bool success = true; - Arena* arena = &vk.frame_arenas[0]; + Arena* arena = &g_vk.frame_arenas[0]; VkSurfaceCapabilitiesKHR cap; - vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vk.physical_device, vk.surface, &cap); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR(g_vk.physical_device, g_vk.surface, &cap); static bool initialized = false; if(!initialized) { - SelectSwapchainFormats(vk); + SelectSwapchainFormats(); } - u32 w = clamp(cast(u32)vk.res[0], cap.minImageExtent.width, cap.maxImageExtent.width); - u32 h = clamp(cast(u32)vk.res[1], cap.minImageExtent.height, cap.maxImageExtent.height); + u32 w = clamp(cast(u32)g_vk.res[0], 
cap.minImageExtent.width, cap.maxImageExtent.width); + u32 h = clamp(cast(u32)g_vk.res[1], cap.minImageExtent.height, cap.maxImageExtent.height); VkSwapchainCreateInfoKHR info = { sType: VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, @@ -2384,27 +2445,27 @@ CreateSwapchain(Vulkan* vk) clipped: VK_TRUE, imageSharingMode: VK_SHARING_MODE_EXCLUSIVE, minImageCount: cap.minImageCount + 1, - surface: vk.surface, - imageFormat: vk.surface_format.format, - imageColorSpace: vk.surface_format.colorSpace, + surface: g_vk.surface, + imageFormat: g_vk.surface_format.format, + imageColorSpace: g_vk.surface_format.colorSpace, imageExtent: { width: w, height: h, }, preTransform: cap.currentTransform, - presentMode: vk.present_mode, + presentMode: g_vk.present_mode, }; - VkResult result = vkCreateSwapchainKHR(vk.device, &info, null, &vk.swapchain); + VkResult result = vkCreateSwapchainKHR(g_vk.device, &info, null, &g_vk.swapchain); success = VkCheck("vkCreateSwapchainKHR failure", result); u32 count; - vkGetSwapchainImagesKHR(vk.device, vk.swapchain, &count, null); + vkGetSwapchainImagesKHR(g_vk.device, g_vk.swapchain, &count, null); VkImage[] images = Alloc!(VkImage)(arena, count); - vkGetSwapchainImagesKHR(vk.device, vk.swapchain, &count, images.ptr); + vkGetSwapchainImagesKHR(g_vk.device, g_vk.swapchain, &count, images.ptr); VkImageView[] views = Alloc!(VkImageView)(arena, count); - vk.present_images = Alloc!(ImageView)(&vk.arena, count); + g_vk.present_images = Alloc!(ImageView)(&g_vk.arena, count); VkImageViewCreateInfo view_info = { sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, @@ -2428,20 +2489,20 @@ CreateSwapchain(Vulkan* vk) sType: VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, }; - foreach(i, image; vk.present_images) + foreach(i, image; g_vk.present_images) { - vk.present_images[i].image = images[i]; - vk.present_images[i].format = cast(Format)vk.surface_format.format; + g_vk.present_images[i].image = images[i]; + g_vk.present_images[i].format = 
cast(Format)g_vk.surface_format.format; view_info.image = images[i]; - view_info.format = vk.surface_format.format; + view_info.format = g_vk.surface_format.format; - result = vkCreateImageView(vk.device, &view_info, null, &vk.present_images[i].view); + result = vkCreateImageView(g_vk.device, &view_info, null, &g_vk.present_images[i].view); success = VkCheck("vkCreateImageView failure", result); } - vk.swapchain_extent.width = w; - vk.swapchain_extent.height = h; - vk.swapchain_extent.depth = 1; + g_vk.swapchain_extent.width = w; + g_vk.swapchain_extent.height = h; + g_vk.swapchain_extent.depth = 1; if(!initialized && success) { @@ -2452,91 +2513,91 @@ CreateSwapchain(Vulkan* vk) } void -RecreateSwapchain(Vulkan* vk) +RecreateSwapchain() { - vkDeviceWaitIdle(vk.device); + vkDeviceWaitIdle(g_vk.device); - Destroy(vk.swapchain, vk.present_images, vk.device); - Destroy(vk, &vk.draw_image); - Destroy(vk, &vk.depth_image); - vkDestroyFramebuffer(vk.device, vk.framebuffer, null); + Destroy(g_vk.swapchain, g_vk.present_images, g_vk.device); + Destroy(&g_vk.draw_image); + Destroy(&g_vk.depth_image); + vkDestroyFramebuffer(g_vk.device, g_vk.framebuffer, null); - CreateSwapchain(vk); - CreateDrawImages(vk); - Write(vk, vk.global_set, &vk.draw_image); - CreateFramebuffer(vk); + CreateSwapchain(); + CreateDrawImages(); + Write(g_vk.global_set, &g_vk.draw_image); + CreateFramebuffer(); - vk.recreate_swapchain = false; + g_vk.recreate_swapchain = false; } void -CreateFramebuffer(Vulkan* vk) +CreateFramebuffer() { VkFramebufferCreateInfo framebuffer_info = { sType: VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, - renderPass: vk.render_pass, + renderPass: g_vk.render_pass, attachmentCount: 2, - pAttachments: [vk.draw_image.view.view, vk.depth_image.view.view], - width: vk.swapchain_extent.width, - height: vk.swapchain_extent.height, + pAttachments: [g_vk.draw_image.view.view, g_vk.depth_image.view.view], + width: g_vk.swapchain_extent.width, + height: g_vk.swapchain_extent.height, 
layers: 1, }; - VkResult result = vkCreateFramebuffer(vk.device, &framebuffer_info, null, &vk.framebuffer); + VkResult result = vkCreateFramebuffer(g_vk.device, &framebuffer_info, null, &g_vk.framebuffer); VkCheckA("vkCreateFramebuffer failure", result); } void -Destroy(T)(Vulkan* vk, MappedBuffer!(T)* buf) +Destroy(T)(MappedBuffer!(T)* buf) { - vmaUnmapMemory(vk.vma, buf.alloc); - Destroy(vk, &buf.base); + vmaUnmapMemory(g_vk.vma, buf.alloc); + Destroy(&buf.base); } void -Destroy(Vulkan* vk, Buffer* buf) +Destroy(Buffer* buf) { - vmaDestroyBuffer(vk.vma, buf.buffer, buf.alloc); + vmaDestroyBuffer(g_vk.vma, buf.buffer, buf.alloc); } void -Destroy(Vulkan* vk, Descriptor* desc) +Destroy(Descriptor* desc) { switch(desc.type) { case DT.Sampler: { - vkDestroySampler(vk.device, desc.sampler, null); + vkDestroySampler(g_vk.device, desc.sampler, null); } break; case DT.CombinedSampler, DT.Image, DT.StorageImage, DT.InputAttach: { if(desc.view.view) { - vkDestroyImageView(vk.device, desc.view.view, null); + vkDestroyImageView(g_vk.device, desc.view.view, null); } if(desc.view.image) { - vmaDestroyImage(vk.vma, desc.view.image, desc.view.alloc); + vmaDestroyImage(g_vk.vma, desc.view.image, desc.view.alloc); } } break; case DT.UniformTexelBuf, DT.StorageTexelBuf: { if(desc.buf_view.view) { - vkDestroyBufferView(vk.device, desc.buf_view.view, null); + vkDestroyBufferView(g_vk.device, desc.buf_view.view, null); } if(desc.buf_view.buffer) { - vmaDestroyBuffer(vk.vma, desc.buf_view.buffer, desc.buf_view.alloc); + vmaDestroyBuffer(g_vk.vma, desc.buf_view.buffer, desc.buf_view.alloc); } } break; case DT.Uniform, DT.DynamicUniform, DT.Storage, DT.DynamicStorage: { if(desc.buf.buffer) { - vmaDestroyBuffer(vk.vma, desc.buf.buffer, desc.buf.alloc); + vmaDestroyBuffer(g_vk.vma, desc.buf.buffer, desc.buf.alloc); } } break; default: assert(false, "Unsupported descriptor Destroy call"); @@ -2544,14 +2605,14 @@ Destroy(Vulkan* vk, Descriptor* desc) } void -DestroyRenderer(Vulkan* vk) 
+DestroyRenderer() { - foreach(i, arena; vk.frame_arenas) + foreach(i, arena; g_vk.frame_arenas) { - Free(vk.frame_arenas.ptr + i); + Free(g_vk.frame_arenas.ptr + i); } - Free(&vk.arena); + Free(&g_vk.arena); } void @@ -2599,20 +2660,20 @@ Destroy(VkSwapchainKHR swapchain, ImageView[] views, VkDevice device) } void -PrintShaderDisassembly(Vulkan* vk, Pipeline pipeline_id, VkShaderStageFlagBits stage) +PrintShaderDisassembly(Pipeline pipeline_id, VkShaderStageFlagBits stage) { version(AMD_GPU) { - PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_id; + PipelineHandles* pipeline = g_vk.pipeline_handles.ptr + pipeline_id; version(VULKAN_DEBUG) { u64 size; - VkResult result = vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, null); + VkResult result = vkGetShaderInfoAMD(g_vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, null); if(result == VK_SUCCESS) { - u8[] buf = Alloc!(u8)(&vk.frame_arenas[vk.frame_index], size); - vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, buf.ptr); + u8[] buf = Alloc!(u8)(g_vk.FrameArena(), size); + vkGetShaderInfoAMD(g_vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, buf.ptr); Logf("DISASSEMBLY:\n%r", buf); } } @@ -2620,36 +2681,36 @@ PrintShaderDisassembly(Vulkan* vk, Pipeline pipeline_id, VkShaderStageFlagBits s } void -SetStencilWriteMask(Vulkan* vk, StencilFace face, u32 mask) +SetStencilWriteMask(StencilFace face, u32 mask) { - vkCmdSetStencilWriteMask(vk.cmds[vk.frame_index], face, mask); + vkCmdSetStencilWriteMask(g_vk.cmd, face, mask); } void -SetStencilReference(Vulkan* vk, StencilFace face, u32 reference) +SetStencilReference(StencilFace face, u32 reference) { - vkCmdSetStencilReference(vk.cmds[vk.frame_index], face, reference); + vkCmdSetStencilReference(g_vk.cmd, face, reference); } void -SetStencilCompareMask(Vulkan* vk, StencilFace face, u32 mask) 
+SetStencilCompareMask(StencilFace face, u32 mask) { - vkCmdSetStencilCompareMask(vk.cmds[vk.frame_index], face, mask); + vkCmdSetStencilCompareMask(g_vk.cmd, face, mask); } void -SetStencilTest(Vulkan* vk, VkBool32 enable) +SetStencilTest(VkBool32 enable) { - vkCmdSetStencilTestEnable(vk.cmds[vk.frame_index], enable); + vkCmdSetStencilTestEnable(g_vk.cmd, enable); } void -SetStencilOp(Vulkan* vk, StencilFace face, StencilOp fail_op, StencilOp pass_op, StencilOp depth_fail_op, CompareOp cmp_op) +SetStencilOp(StencilFace face, StencilOp fail_op, StencilOp pass_op, StencilOp depth_fail_op, CompareOp cmp_op) { - vkCmdSetStencilOp(vk.cmds[vk.frame_index], face, fail_op, pass_op, depth_fail_op, cmp_op); + vkCmdSetStencilOp(g_vk.cmd, face, fail_op, pass_op, depth_fail_op, cmp_op); } -Vulkan +void Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) { bool success; @@ -2663,6 +2724,8 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) platform_handles: platform_handles, }; + g_vk = vk; + Library lib; Function fn; foreach(name; VULKAN_LIBS) @@ -2691,7 +2754,7 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) if(success) { - Arena* arena = &vk.frame_arenas[0]; + Arena* arena = &g_vk.frame_arenas[0]; u32 count; vkEnumerateInstanceLayerProperties(&count, null); @@ -2741,13 +2804,13 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) instance_info.pNext = &validation_features; } - VkResult result = vkCreateInstance(&instance_info, null, &vk.instance); + VkResult result = vkCreateInstance(&instance_info, null, &g_vk.instance); success &= VkCheck("vkCreateInstance failure", result); } if(success) { - LoadInstanceFunctions(&vk); + LoadInstanceFunctions(); version (VULKAN_DEBUG) { @@ -2774,7 +2837,7 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) info.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT; } - VkResult result = 
vkCreateDebugUtilsMessengerEXT(vk.instance, &info, null, &vk.dbg_msg); + VkResult result = vkCreateDebugUtilsMessengerEXT(g_vk.instance, &info, null, &g_vk.dbg_msg); if(result != VK_SUCCESS) { Logf("EnableVLayers failed to initialize, will continue without validation: %s", VkResultStr(result)); @@ -2794,22 +2857,22 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) { VkXlibSurfaceCreateInfoKHR surface_info = { sType: VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, - dpy: vk.platform_handles.display, - window: vk.platform_handles.window, + dpy: g_vk.platform_handles.display, + window: g_vk.platform_handles.window, }; - VkResult result = vkCreateXlibSurfaceKHR(vk.instance, &surface_info, null, &vk.surface); + VkResult result = vkCreateXlibSurfaceKHR(g_vk.instance, &surface_info, null, &g_vk.surface); } version(Windows) { VkWin32SurfaceCreateInfoKHR surface_info = { sType: VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, - hinstance: vk.platform_handles.instance, - hwnd: vk.platform_handles.handle, + hinstance: g_vk.platform_handles.instance, + hwnd: g_vk.platform_handles.handle, }; - VkResult result = vkCreateWin32SurfaceKHR(vk.instance, &surface_info, null, &vk.surface); + VkResult result = vkCreateWin32SurfaceKHR(g_vk.instance, &surface_info, null, &g_vk.surface); } success &= VkCheck("InitSurface failure", result); @@ -2819,12 +2882,12 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) { success = false; - Arena* arena = &vk.frame_arenas[0]; + Arena* arena = &g_vk.frame_arenas[0]; u32 count; - vkEnumeratePhysicalDevices(vk.instance, &count, null); + vkEnumeratePhysicalDevices(g_vk.instance, &count, null); VkPhysicalDevice[] devices = Alloc!(VkPhysicalDevice)(arena, count); - vkEnumeratePhysicalDevices(vk.instance, &count, devices.ptr); + vkEnumeratePhysicalDevices(g_vk.instance, &count, devices.ptr); VkPhysicalDevice physical_device = null; bool discrete_candidate = false; @@ -2865,7 +2928,7 @@ Init(PlatformHandles 
platform_handles, u64 permanent_mem, u64 frame_mem) { i32 idx = cast(i32)i; b32 surface_support; - vkGetPhysicalDeviceSurfaceSupportKHR(dev, cast(u32)i, vk.surface, &surface_support); + vkGetPhysicalDeviceSurfaceSupportKHR(dev, cast(u32)i, g_vk.surface, &surface_support); if(current.gfx_index < 0 && surface_support && prop.queueFlags & G_BIT) { @@ -2928,8 +2991,8 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) if(matched == VK_DEVICE_EXTENSIONS.length) { u32 fmt_count, present_count; - vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vk.surface, &fmt_count, null); - vkGetPhysicalDeviceSurfacePresentModesKHR(dev, vk.surface, &present_count, null); + vkGetPhysicalDeviceSurfaceFormatsKHR(dev, g_vk.surface, &fmt_count, null); + vkGetPhysicalDeviceSurfacePresentModesKHR(dev, g_vk.surface, &present_count, null); discrete = props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU; result = fmt_count && present_count; @@ -3066,17 +3129,17 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) pEnabledFeatures: null, }; - VkResult result = vkCreateDevice(physical_device, &device_info, null, &vk.device); + VkResult result = vkCreateDevice(physical_device, &device_info, null, &g_vk.device); if(result != VK_SUCCESS) { Logf("vkCreateDevices failure: %s", VkResultStr(result)); } else { - LoadDeviceFunctions(&vk); + LoadDeviceFunctions(); vkGetDeviceQueue( - vk.device, + g_vk.device, candidate.gfx_index, 0, &candidate.gfx_queue @@ -3085,7 +3148,7 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) if(!candidate.single_queue) { vkGetDeviceQueue( - vk.device, + g_vk.device, candidate.tfer_index, candidate.tfer_index == candidate.gfx_index ? 
1 : 0, &candidate.tfer_queue @@ -3097,8 +3160,8 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) candidate.tfer_index = candidate.gfx_index; } - vk.physical_device = physical_device; - vk.queues = candidate; + g_vk.physical_device = physical_device; + g_vk.queues = candidate; success = true; } @@ -3116,28 +3179,28 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) flags: VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT, vulkanApiVersion: VK_MAKE_API_VERSION(0, 1, 2, 0), pVulkanFunctions: &vk_functions, - physicalDevice: vk.physical_device, - device: vk.device, - instance: vk.instance, + physicalDevice: g_vk.physical_device, + device: g_vk.device, + instance: g_vk.instance, }; - VkResult result = vmaCreateAllocator(&info, &vk.vma); + VkResult result = vmaCreateAllocator(&info, &g_vk.vma); success &= VkCheck("vmaCreateAllocator failure", result); } if(success) { - success = CreateSwapchain(&vk); + success = CreateSwapchain(); } if(success) { - success = CreateDrawImages(&vk); + success = CreateDrawImages(); } if(success) { - Arena* arena = &vk.frame_arenas[0]; + Arena* arena = &g_vk.frame_arenas[0]; VkSemaphoreCreateInfo sem_info = { sType: VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO }; @@ -3157,65 +3220,65 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) level: VK_COMMAND_BUFFER_LEVEL_PRIMARY, }; - u32 sem_count = cast(u32)vk.present_images.length; - vk.submit_sems = Alloc!(VkSemaphore)(arena, sem_count); + u32 sem_count = cast(u32)g_vk.present_images.length; + g_vk.submit_sems = Alloc!(VkSemaphore)(arena, sem_count); VkResult result; foreach(i; 0 .. sem_count) { - result = vkCreateSemaphore(vk.device, &sem_info, null, vk.submit_sems.ptr + i); + result = vkCreateSemaphore(g_vk.device, &sem_info, null, g_vk.submit_sems.ptr + i); success &= VkCheck("vkCreateSemaphore failure", result); } foreach(i; 0 .. 
FRAME_OVERLAP) { - pool_info.queueFamilyIndex = vk.gfx_index; + pool_info.queueFamilyIndex = g_vk.gfx_index; - result = vkCreateCommandPool(vk.device, &pool_info, null, vk.cmd_pools.ptr + i); + result = vkCreateCommandPool(g_vk.device, &pool_info, null, g_vk.cmd_pools.ptr + i); success &= VkCheck("vkCreateCommandPool failure", result); - cmd_info.commandPool = vk.cmd_pools[i]; + cmd_info.commandPool = g_vk.cmd_pools[i]; - result = vkAllocateCommandBuffers(vk.device, &cmd_info, vk.cmds.ptr + i); + result = vkAllocateCommandBuffers(g_vk.device, &cmd_info, g_vk.cmds.ptr + i); success &= VkCheck("vkAllocateCommandBuffers failure", result); - result = vkCreateFence(vk.device, &fence_info, null, vk.render_fences.ptr + i); + result = vkCreateFence(g_vk.device, &fence_info, null, g_vk.render_fences.ptr + i); success &= VkCheck("vkCreateFence failure", result); - result = vkCreateSemaphore(vk.device, &sem_info, null, vk.acquire_sems.ptr + i); + result = vkCreateSemaphore(g_vk.device, &sem_info, null, g_vk.acquire_sems.ptr + i); success &= VkCheck("vkCreateSemaphore failure", result); } - pool_info.queueFamilyIndex = vk.tfer_index; + pool_info.queueFamilyIndex = g_vk.tfer_index; - result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.imm_pool); + result = vkCreateCommandPool(g_vk.device, &pool_info, null, &g_vk.imm_pool); success &= VkCheck("vkCreateCommandPool failure", result); - cmd_info.commandPool = vk.imm_pool; + cmd_info.commandPool = g_vk.imm_pool; - result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.imm_cmd); + result = vkAllocateCommandBuffers(g_vk.device, &cmd_info, &g_vk.imm_cmd); success &= VkCheck("vkAllocateCommandBuffers failure", result); - result = vkCreateFence(vk.device, &fence_info, null, &vk.imm_fence); + result = vkCreateFence(g_vk.device, &fence_info, null, &g_vk.imm_fence); success &= VkCheck("vkCreateFence failure", result); - pool_info.queueFamilyIndex = vk.gfx_index; + pool_info.queueFamilyIndex = g_vk.gfx_index; - result = 
vkCreateCommandPool(vk.device, &pool_info, null, &vk.comp_cmd_pool); + result = vkCreateCommandPool(g_vk.device, &pool_info, null, &g_vk.comp_cmd_pool); success &= VkCheck("vkCreateCommandPool failure", result); - cmd_info.commandPool = vk.comp_cmd_pool; + cmd_info.commandPool = g_vk.comp_cmd_pool; - result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.comp_cmd); + result = vkAllocateCommandBuffers(g_vk.device, &cmd_info, &g_vk.comp_cmd); success &= VkCheck("vkCreateCommandPool failure", result); - result = vkCreateFence(vk.device, &fence_info, null, &vk.comp_fence); + result = vkCreateFence(g_vk.device, &fence_info, null, &g_vk.comp_fence); success &= VkCheck("vkCreateFence failure", result); } if(success) { - PushDescriptorPool(&vk); + PushDescriptorPool(); if(success) { @@ -3224,29 +3287,29 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL }, ]; - vk.global_set_layout = CreateDescSetLayout(&vk, layout_bindings); - vk.global_set = AllocDescSet(&vk, vk.global_set_layout); + g_vk.global_set_layout = CreateDescSetLayout(layout_bindings); + g_vk.global_set = AllocDescSet(g_vk.global_set_layout); - Write(&vk, vk.global_set, [vk.draw_image]); + Write(g_vk.global_set, [g_vk.draw_image]); } } if(success) { - vk.pipeline_handles = Alloc!(PipelineHandles)(&vk.arena, 128); - vk.pipeline_count += 1; + g_vk.pipeline_handles = Alloc!(PipelineHandles)(&g_vk.arena, 128); + g_vk.pipeline_count += 1; u64 transfer_size = MB(64); - vk.transfer_buf = CreateMappedBuffer!(u8)(&vk, BT.Staging, transfer_size); + g_vk.transfer_buf = CreateMappedBuffer!(u8)(BT.Staging, transfer_size); VkDescriptorSetLayoutBinding[2] layout_bindings = [ { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT }, { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, descriptorCount: 1, 
stageFlags: VK_SHADER_STAGE_COMPUTE_BIT }, ]; - vk.conv_desc_layout = CreateDescSetLayout(&vk, layout_bindings); - vk.conv_desc_set = AllocDescSet(&vk, vk.conv_desc_layout); - vk.conv_pipeline_layout = CreatePipelineLayout(&vk, vk.conv_desc_layout, ConvPushConst.sizeof, true); + g_vk.conv_desc_layout = CreateDescSetLayout(layout_bindings); + g_vk.conv_desc_set = AllocDescSet(g_vk.conv_desc_layout); + g_vk.conv_pipeline_layout = CreatePipelineLayout(g_vk.conv_desc_layout, ConvPushConst.sizeof, true); u32 channels = 1; SpecEntry[1] entries = [ @@ -3259,7 +3322,7 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) CompPipelineInfo conv_info = { shader: CONVERT_SHADER_BYTES, - layout: vk.conv_pipeline_layout, + layout: g_vk.conv_pipeline_layout, spec: { data: &channels, size: u32.sizeof, @@ -3267,18 +3330,18 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) }, }; - vk.r_to_rgba_pipeline = CreateComputePipeline(&vk, &conv_info); + g_vk.r_to_rgba_pipeline = CreateComputePipeline(&conv_info); channels = 2; - vk.rg_to_rgba_pipeline = CreateComputePipeline(&vk, &conv_info); + g_vk.rg_to_rgba_pipeline = CreateComputePipeline(&conv_info); channels = 3; - vk.rgb_to_rgba_pipeline = CreateComputePipeline(&vk, &conv_info); + g_vk.rgb_to_rgba_pipeline = CreateComputePipeline(&conv_info); VkAttachmentDescription[2] attach_descriptions = [ { - format: vk.draw_image.view.format, + format: g_vk.draw_image.view.format, samples: VK_SAMPLE_COUNT_1_BIT, loadOp: VK_ATTACHMENT_LOAD_OP_CLEAR, storeOp: VK_ATTACHMENT_STORE_OP_STORE, @@ -3288,7 +3351,7 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem) finalLayout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, }, { - format: vk.depth_image.view.format, + format: g_vk.depth_image.view.format, samples: VK_SAMPLE_COUNT_1_BIT, loadOp: VK_ATTACHMENT_LOAD_OP_CLEAR, storeOp: VK_ATTACHMENT_STORE_OP_STORE, @@ -3334,20 +3397,18 @@ Init(PlatformHandles platform_handles, u64 
permanent_mem, u64 frame_mem) pDependencies: &self_dependency, }; - VkResult result = vkCreateRenderPass(vk.device, &pass_info, null, &vk.render_pass); + VkResult result = vkCreateRenderPass(g_vk.device, &pass_info, null, &g_vk.render_pass); VkCheckA("vkCreateRenderPass failure", result); - CreateFramebuffer(&vk); + CreateFramebuffer(); } assert(success, "Error initializing vulkan"); - - return vk; } version(VULKAN_RENDERER_TEST) unittest { PlatformHandles handles; - Vulkan vk = Init(handles, 10*1000*1000, 10*1000*1000); + Init(handles, 10*1000*1000, 10*1000*1000); } diff --git a/vulkan_funcs.d b/vulkan_funcs.d index 746372a..26fb5a9 100644 --- a/vulkan_funcs.d +++ b/vulkan_funcs.d @@ -1,6 +1,6 @@ public import vulkan_includes; import vulkan_util; -import vulkan : Vulkan, VULKAN_LIBS; +import vulkan : Vulkan, VULKAN_LIBS, g_vk; // Global Functions @@ -136,141 +136,141 @@ alias vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; alias vkCmdSetStencilOp = vkCmdSetStencilOpEXT; static string -LoadDeviceFunc(alias fn, alias vk)() +LoadDeviceFunc(alias fn)() { import std.format : format; import std.string : chompPrefix; enum string fn_str = __traits(identifier, fn); - enum string vk_str = __traits(identifier, vk); + enum string vk_str = __traits(identifier, g_vk); return format("%s = cast(typeof(%s))vkGetDeviceProcAddr(%s.device, \"%s\"); assert(%s != null, \"Function pointer %s is null\");", fn_str, fn_str, vk_str, fn_str, fn_str, fn_str); } static string -LoadInstanceFunc(alias fn, alias vk)() +LoadInstanceFunc(alias fn)() { import std.format : format; import std.string : chompPrefix; enum string fn_str = __traits(identifier, fn); - enum string vk_str = __traits(identifier, vk); + enum string vk_str = __traits(identifier, g_vk); return format("%s = cast(typeof(%s))vkGetInstanceProcAddr(%s.instance, \"%s\"); assert(%s != null, \"Function pointer %s is null\");", fn_str, fn_str, vk_str, fn_str, fn_str, fn_str); } void -LoadDeviceFunctions(Vulkan* vk) 
+LoadDeviceFunctions() { - mixin(LoadDeviceFunc!(vkCreateSwapchainKHR, vk)); - mixin(LoadDeviceFunc!(vkCmdBeginRenderPass, vk)); - mixin(LoadDeviceFunc!(vkCmdEndRenderPass, vk)); - mixin(LoadDeviceFunc!(vkCreateFramebuffer, vk)); - mixin(LoadDeviceFunc!(vkDestroyFramebuffer, vk)); - mixin(LoadDeviceFunc!(vkCreateRenderPass, vk)); - mixin(LoadDeviceFunc!(vkDestroyRenderPass, vk)); - mixin(LoadDeviceFunc!(vkCreateImage, vk)); - mixin(LoadDeviceFunc!(vkCreateImageView, vk)); - mixin(LoadDeviceFunc!(vkQueueSubmit2, vk)); - mixin(LoadDeviceFunc!(vkCreateBufferView, vk)); - mixin(LoadDeviceFunc!(vkGetSwapchainImagesKHR, vk)); - mixin(LoadDeviceFunc!(vkGetDeviceQueue, vk)); - mixin(LoadDeviceFunc!(vkCreateSemaphore, vk)); - mixin(LoadDeviceFunc!(vkAllocateCommandBuffers, vk)); - mixin(LoadDeviceFunc!(vkCreateCommandPool, vk)); - mixin(LoadDeviceFunc!(vkCmdPipelineBarrier, vk)); - mixin(LoadDeviceFunc!(vkCmdPipelineBarrier2, vk)); - mixin(LoadDeviceFunc!(vkCreateFence, vk)); - mixin(LoadDeviceFunc!(vkCreateDescriptorPool, vk)); - mixin(LoadDeviceFunc!(vkCreateDescriptorSetLayout, vk)); - mixin(LoadDeviceFunc!(vkAllocateDescriptorSets, vk)); - mixin(LoadDeviceFunc!(vkCreatePipelineLayout, vk)); - mixin(LoadDeviceFunc!(vkResetDescriptorPool, vk)); - mixin(LoadDeviceFunc!(vkCreateShaderModule, vk)); - mixin(LoadDeviceFunc!(vkCreateGraphicsPipelines, vk)); - mixin(LoadDeviceFunc!(vkCreateComputePipelines, vk)); - mixin(LoadDeviceFunc!(vkUpdateDescriptorSets, vk)); - mixin(LoadDeviceFunc!(vkDestroyDevice, vk)); - mixin(LoadDeviceFunc!(vkDestroyDescriptorPool, vk)); - mixin(LoadDeviceFunc!(vkDestroySwapchainKHR, vk)); - mixin(LoadDeviceFunc!(vkDestroyBufferView, vk)); - mixin(LoadDeviceFunc!(vkDestroyImage, vk)); - mixin(LoadDeviceFunc!(vkDestroyImageView, vk)); - mixin(LoadDeviceFunc!(vkDestroyCommandPool, vk)); - mixin(LoadDeviceFunc!(vkDestroySemaphore, vk)); - mixin(LoadDeviceFunc!(vkDestroyFence, vk)); - mixin(LoadDeviceFunc!(vkDestroyPipelineLayout, vk)); - 
mixin(LoadDeviceFunc!(vkDestroyPipeline, vk)); - mixin(LoadDeviceFunc!(vkWaitForFences, vk)); - mixin(LoadDeviceFunc!(vkBeginCommandBuffer, vk)); - mixin(LoadDeviceFunc!(vkEndCommandBuffer, vk)); - mixin(LoadDeviceFunc!(vkAcquireNextImageKHR, vk)); - mixin(LoadDeviceFunc!(vkCmdBindPipeline, vk)); - mixin(LoadDeviceFunc!(vkCmdBindDescriptorSets, vk)); - mixin(LoadDeviceFunc!(vkCmdDispatch, vk)); - mixin(LoadDeviceFunc!(vkCmdSetViewport, vk)); - mixin(LoadDeviceFunc!(vkCmdSetScissor, vk)); - mixin(LoadDeviceFunc!(vkCmdPushConstants, vk)); - mixin(LoadDeviceFunc!(vkCmdBindIndexBuffer, vk)); - mixin(LoadDeviceFunc!(vkCmdBindVertexBuffers, vk)); - mixin(LoadDeviceFunc!(vkCmdDrawIndexed, vk)); - mixin(LoadDeviceFunc!(vkCmdBlitImage, vk)); - mixin(LoadDeviceFunc!(vkCmdCopyBufferToImage, vk)); - mixin(LoadDeviceFunc!(vkCmdCopyBuffer, vk)); - mixin(LoadDeviceFunc!(vkResetFences, vk)); - mixin(LoadDeviceFunc!(vkResetCommandBuffer, vk)); - mixin(LoadDeviceFunc!(vkFreeCommandBuffers, vk)); - mixin(LoadDeviceFunc!(vkDestroyDescriptorSetLayout, vk)); - mixin(LoadDeviceFunc!(vkDestroyShaderModule, vk)); - mixin(LoadDeviceFunc!(vkQueuePresentKHR, vk)); - mixin(LoadDeviceFunc!(vkCmdDraw, vk)); - mixin(LoadDeviceFunc!(vkDeviceWaitIdle, vk)); - mixin(LoadDeviceFunc!(vkCmdClearColorImage, vk)); - mixin(LoadDeviceFunc!(vkCreateSampler, vk)); - mixin(LoadDeviceFunc!(vkDestroySampler, vk)); - mixin(LoadDeviceFunc!(vkGetBufferDeviceAddress, vk)); - mixin(LoadDeviceFunc!(vkWaitSemaphores, vk)); - mixin(LoadDeviceFunc!(vkQueueWaitIdle, vk)); - mixin(LoadDeviceFunc!(vkCmdSetStencilTestEnableEXT, vk)); - mixin(LoadDeviceFunc!(vkCmdSetStencilOpEXT, vk)); - mixin(LoadDeviceFunc!(vkCmdSetStencilReference, vk)); - mixin(LoadDeviceFunc!(vkCmdSetStencilCompareMask, vk)); - mixin(LoadDeviceFunc!(vkCmdSetStencilWriteMask, vk)); + mixin(LoadDeviceFunc!(vkCreateSwapchainKHR)); + mixin(LoadDeviceFunc!(vkCmdBeginRenderPass)); + mixin(LoadDeviceFunc!(vkCmdEndRenderPass)); + 
mixin(LoadDeviceFunc!(vkCreateFramebuffer)); + mixin(LoadDeviceFunc!(vkDestroyFramebuffer)); + mixin(LoadDeviceFunc!(vkCreateRenderPass)); + mixin(LoadDeviceFunc!(vkDestroyRenderPass)); + mixin(LoadDeviceFunc!(vkCreateImage)); + mixin(LoadDeviceFunc!(vkCreateImageView)); + mixin(LoadDeviceFunc!(vkQueueSubmit2)); + mixin(LoadDeviceFunc!(vkCreateBufferView)); + mixin(LoadDeviceFunc!(vkGetSwapchainImagesKHR)); + mixin(LoadDeviceFunc!(vkGetDeviceQueue)); + mixin(LoadDeviceFunc!(vkCreateSemaphore)); + mixin(LoadDeviceFunc!(vkAllocateCommandBuffers)); + mixin(LoadDeviceFunc!(vkCreateCommandPool)); + mixin(LoadDeviceFunc!(vkCmdPipelineBarrier)); + mixin(LoadDeviceFunc!(vkCmdPipelineBarrier2)); + mixin(LoadDeviceFunc!(vkCreateFence)); + mixin(LoadDeviceFunc!(vkCreateDescriptorPool)); + mixin(LoadDeviceFunc!(vkCreateDescriptorSetLayout)); + mixin(LoadDeviceFunc!(vkAllocateDescriptorSets)); + mixin(LoadDeviceFunc!(vkCreatePipelineLayout)); + mixin(LoadDeviceFunc!(vkResetDescriptorPool)); + mixin(LoadDeviceFunc!(vkCreateShaderModule)); + mixin(LoadDeviceFunc!(vkCreateGraphicsPipelines)); + mixin(LoadDeviceFunc!(vkCreateComputePipelines)); + mixin(LoadDeviceFunc!(vkUpdateDescriptorSets)); + mixin(LoadDeviceFunc!(vkDestroyDevice)); + mixin(LoadDeviceFunc!(vkDestroyDescriptorPool)); + mixin(LoadDeviceFunc!(vkDestroySwapchainKHR)); + mixin(LoadDeviceFunc!(vkDestroyBufferView)); + mixin(LoadDeviceFunc!(vkDestroyImage)); + mixin(LoadDeviceFunc!(vkDestroyImageView)); + mixin(LoadDeviceFunc!(vkDestroyCommandPool)); + mixin(LoadDeviceFunc!(vkDestroySemaphore)); + mixin(LoadDeviceFunc!(vkDestroyFence)); + mixin(LoadDeviceFunc!(vkDestroyPipelineLayout)); + mixin(LoadDeviceFunc!(vkDestroyPipeline)); + mixin(LoadDeviceFunc!(vkWaitForFences)); + mixin(LoadDeviceFunc!(vkBeginCommandBuffer)); + mixin(LoadDeviceFunc!(vkEndCommandBuffer)); + mixin(LoadDeviceFunc!(vkAcquireNextImageKHR)); + mixin(LoadDeviceFunc!(vkCmdBindPipeline)); + mixin(LoadDeviceFunc!(vkCmdBindDescriptorSets)); + 
mixin(LoadDeviceFunc!(vkCmdDispatch)); + mixin(LoadDeviceFunc!(vkCmdSetViewport)); + mixin(LoadDeviceFunc!(vkCmdSetScissor)); + mixin(LoadDeviceFunc!(vkCmdPushConstants)); + mixin(LoadDeviceFunc!(vkCmdBindIndexBuffer)); + mixin(LoadDeviceFunc!(vkCmdBindVertexBuffers)); + mixin(LoadDeviceFunc!(vkCmdDrawIndexed)); + mixin(LoadDeviceFunc!(vkCmdBlitImage)); + mixin(LoadDeviceFunc!(vkCmdCopyBufferToImage)); + mixin(LoadDeviceFunc!(vkCmdCopyBuffer)); + mixin(LoadDeviceFunc!(vkResetFences)); + mixin(LoadDeviceFunc!(vkResetCommandBuffer)); + mixin(LoadDeviceFunc!(vkFreeCommandBuffers)); + mixin(LoadDeviceFunc!(vkDestroyDescriptorSetLayout)); + mixin(LoadDeviceFunc!(vkDestroyShaderModule)); + mixin(LoadDeviceFunc!(vkQueuePresentKHR)); + mixin(LoadDeviceFunc!(vkCmdDraw)); + mixin(LoadDeviceFunc!(vkDeviceWaitIdle)); + mixin(LoadDeviceFunc!(vkCmdClearColorImage)); + mixin(LoadDeviceFunc!(vkCreateSampler)); + mixin(LoadDeviceFunc!(vkDestroySampler)); + mixin(LoadDeviceFunc!(vkGetBufferDeviceAddress)); + mixin(LoadDeviceFunc!(vkWaitSemaphores)); + mixin(LoadDeviceFunc!(vkQueueWaitIdle)); + mixin(LoadDeviceFunc!(vkCmdSetStencilTestEnableEXT)); + mixin(LoadDeviceFunc!(vkCmdSetStencilOpEXT)); + mixin(LoadDeviceFunc!(vkCmdSetStencilReference)); + mixin(LoadDeviceFunc!(vkCmdSetStencilCompareMask)); + mixin(LoadDeviceFunc!(vkCmdSetStencilWriteMask)); version(AMD_GPU) version(VULKAN_DEBUG) { - mixin(LoadDeviceFunc!(vkGetShaderInfoAMD, vk)); + mixin(LoadDeviceFunc!(vkGetShaderInfoAMD)); } } void -LoadInstanceFunctions(Vulkan* vk) +LoadInstanceFunctions() { - mixin(LoadInstanceFunc!(vkEnumeratePhysicalDevices, vk)); - mixin(LoadInstanceFunc!(vkDestroySurfaceKHR, vk)); - mixin(LoadInstanceFunc!(vkDestroyInstance, vk)); - mixin(LoadInstanceFunc!(vkCreateDevice, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceQueueFamilyProperties, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfaceSupportKHR, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceProperties, vk)); - 
mixin(LoadInstanceFunc!(vkGetPhysicalDeviceFeatures2, vk)); - mixin(LoadInstanceFunc!(vkEnumerateDeviceExtensionProperties, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfacePresentModesKHR, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfaceFormatsKHR, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfaceCapabilitiesKHR, vk)); - mixin(LoadInstanceFunc!(vkGetPhysicalDeviceImageFormatProperties, vk)); - mixin(LoadInstanceFunc!(vkGetDeviceProcAddr, vk)); + mixin(LoadInstanceFunc!(vkEnumeratePhysicalDevices)); + mixin(LoadInstanceFunc!(vkDestroySurfaceKHR)); + mixin(LoadInstanceFunc!(vkDestroyInstance)); + mixin(LoadInstanceFunc!(vkCreateDevice)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceQueueFamilyProperties)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfaceSupportKHR)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceProperties)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceFeatures2)); + mixin(LoadInstanceFunc!(vkEnumerateDeviceExtensionProperties)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfacePresentModesKHR)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfaceFormatsKHR)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceSurfaceCapabilitiesKHR)); + mixin(LoadInstanceFunc!(vkGetPhysicalDeviceImageFormatProperties)); + mixin(LoadInstanceFunc!(vkGetDeviceProcAddr)); version(linux) { - mixin(LoadInstanceFunc!(vkCreateXlibSurfaceKHR, vk)); + mixin(LoadInstanceFunc!(vkCreateXlibSurfaceKHR)); } version(Windows) { - mixin(LoadInstanceFunc!(vkCreateWin32SurfaceKHR, vk)); + mixin(LoadInstanceFunc!(vkCreateWin32SurfaceKHR)); } version(VULKAN_DEBUG) { - mixin(LoadInstanceFunc!(vkCreateDebugUtilsMessengerEXT, vk)); - mixin(LoadInstanceFunc!(vkDestroyDebugUtilsMessengerEXT, vk)); + mixin(LoadInstanceFunc!(vkCreateDebugUtilsMessengerEXT)); + mixin(LoadInstanceFunc!(vkDestroyDebugUtilsMessengerEXT)); } }