3266 lines
80 KiB
D
3266 lines
80 KiB
D
import vulkan_funcs;
|
|
import vulkan_logging;
|
|
import vulkan_util;
|
|
import std.stdio;
|
|
import std.algorithm.comparison;
|
|
import core.stdc.string : strcmp, memcpy;
|
|
import core.stdc.stdio : Printf = printf;
|
|
import std.format : sformat;
|
|
import std.math.rounding : Ceil = ceil;
|
|
|
|
// Compile-time flag: only declared in debug builds, so any reference to it
// outside `debug`-gated code fails to compile in release builds.
debug
{
const BUILD_DEBUG = true;
}

// Public-facing aliases: the rest of the codebase refers to the renderer
// through these names instead of raw Vulkan types.
alias InitRenderer = Init;
alias Renderer = Vulkan;
alias Shader = VkShaderModule;
alias Pipeline = u32;                                    // index into Vulkan.pipeline_handles; 0 is invalid (see PushConstants)
alias Attribute = VkVertexInputAttributeDescription;
alias SpecEntry = VkSpecializationMapEntry;
alias RenderPass = VkRenderPass;
alias DescSetLayout = VkDescriptorSetLayout;
alias PipelineLayout = VkPipelineLayout;
alias DescLayoutBinding = VkDescriptorSetLayoutBinding;
alias DescWrite = VkWriteDescriptorSet;

// A descriptor set plus the number of dynamic-offset bindings it carries.
struct DescSet
{
VkDescriptorSet handle;
u32 dynamic_count;   // number of dynamic offsets expected at bind time
}

// Runtime feature flags, presumably set during instance creation — confirm at write sites.
bool g_VLAYER_SUPPORT = false;   // validation layer availability
bool g_DEBUG_PRINTF = false;     // shader debug-printf availability

const FRAME_OVERLAP = 2;         // frames in flight
const MAX_SETS = 1000;           // NOTE(review): likely sets-per-descriptor-pool — confirm at use site
const DESC_ARRAY_SIZE = 256;     // NOTE(review): likely descriptor array size — confirm at use site
|
|
|
|
// Per-platform loader library names and native window handles.
version(linux)
{
// Candidate SONAMEs tried in order when loading the Vulkan loader.
const string[] VULKAN_LIBS = [
"libvulkan.so.1",
"libvulkan.so",
];

// Native surface inputs for vkCreateXcbSurfaceKHR.
struct PlatformHandles
{
xcb_connection_t *conn;
xcb_window_t window;
}
}

version(Windows)
{
const string[] VULKAN_LIBS = [
"vulkan-1.dll",
];

// Native surface inputs for vkCreateWin32SurfaceKHR.
struct PlatformHandles
{
HINSTANCE hinstance;
HWND hwnd;
}
}

// Instance layers: none in release; Khronos validation layer in debug.
const char*[] VK_INSTANCE_LAYERS = [];
const char*[] VK_INSTANCE_LAYERS_DEBUG = [ "VK_LAYER_KHRONOS_validation" ];

// Instance extensions: generic surface + platform surface; debug builds also
// append debug-utils for the messenger.
version(linux)
{
const char*[] VK_INSTANCE_EXT = [ cast(char*)VK_KHR_SURFACE_EXTENSION_NAME, cast(char*)VK_KHR_XCB_SURFACE_EXTENSION_NAME ];
const char*[] VK_INSTANCE_EXT_DEBUG = VK_INSTANCE_EXT ~ [ cast(char*)VK_EXT_DEBUG_UTILS_EXTENSION_NAME ];
}

version(Windows)
{
const char*[] VK_INSTANCE_EXT = [ cast(char*)VK_KHR_SURFACE_EXTENSION_NAME, cast(char*)VK_KHR_WIN32_SURFACE_EXTENSION_NAME ];
const char*[] VK_INSTANCE_EXT_DEBUG = VK_INSTANCE_EXT ~ [ cast(char*)VK_EXT_DEBUG_UTILS_EXTENSION_NAME ];
}
|
|
|
|
// Device extensions required on every build.
const char*[] VK_BASE_DEVICE_EXTENSIONS = [
cast(char*)VK_KHR_SWAPCHAIN_EXTENSION_NAME,
cast(char*)VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME,
cast(char*)VK_KHR_8BIT_STORAGE_EXTENSION_NAME,
cast(char*)VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME,
];

// Extra extensions for AMD shader statistics (debug tooling only).
const char*[] VK_AMD_DEVICE_EXTENSIONS = [
cast(char*)VK_AMD_SHADER_INFO_EXTENSION_NAME,
];

// Final device-extension list.
// Fix: the original only declared VK_DEVICE_EXTENSIONS inside `debug` for
// AMD_GPU builds, so an AMD_GPU *release* build had no VK_DEVICE_EXTENSIONS
// symbol at all and failed to compile. Release AMD builds now fall back to
// the base list, matching non-AMD builds.
version(AMD_GPU)
{
debug
{
const char*[] VK_DEVICE_EXTENSIONS = VK_AMD_DEVICE_EXTENSIONS ~ VK_BASE_DEVICE_EXTENSIONS;
}
else
{
const char*[] VK_DEVICE_EXTENSIONS = VK_BASE_DEVICE_EXTENSIONS;
}
}
else
{
const char*[] VK_DEVICE_EXTENSIONS = VK_BASE_DEVICE_EXTENSIONS;
}
|
|
|
|
// Image formats accepted for render targets, in order of preference.
const VkFormat[] VK_IMAGE_FORMATS = [
VK_FORMAT_R16G16B16A16_UNORM,
VK_FORMAT_R8G8B8A8_UNORM,
];

// Shader stage selector used for pipeline and descriptor creation.
enum ShaderStage : VkShaderStageFlagBits
{
None = cast(VkShaderStageFlagBits)0,
Vertex = VK_SHADER_STAGE_VERTEX_BIT,
Fragment = VK_SHADER_STAGE_FRAGMENT_BIT,
Compute = VK_SHADER_STAGE_COMPUTE_BIT,
}

alias SS = ShaderStage;

// Vertex attribute advance rate: per vertex or per instance.
enum InputRate : int
{
Vertex = VK_VERTEX_INPUT_RATE_VERTEX,
Instance = VK_VERTEX_INPUT_RATE_INSTANCE,
}

alias IR = InputRate;

// Usage-flag presets for each kind of image the renderer creates.
enum ImageUsage : VkImageUsageFlagBits
{
None = cast(VkImageUsageFlagBits)0,
// Offscreen render target: copy src/dst, storage, color attachment.
Draw = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
Depth = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
Texture = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
// Intermediate image written by the channel-conversion compute pass,
// then copied out (see CreateImageView with raw pixel data).
Convert = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
Storage = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
Swapchain = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
}

alias IU = ImageUsage;

// Renderer-friendly names for the VkFormats in use.
enum Format : VkFormat
{
UINT = VK_FORMAT_R32_UINT,
R_F32 = VK_FORMAT_R32_SFLOAT,
RG_F32 = VK_FORMAT_R32G32_SFLOAT,
RGB_F32 = VK_FORMAT_R32G32B32_SFLOAT,
RGBA_F32 = VK_FORMAT_R32G32B32A32_SFLOAT,
RGBA_UINT = VK_FORMAT_B8G8R8A8_UINT,   // NOTE(review): BGRA channel order, unlike other RGBA_* members — confirm intentional
R_U32 = VK_FORMAT_R32_UINT,            // same underlying value as UINT
RG_U32 = VK_FORMAT_R32G32_UINT,
RGBA_UNORM = VK_FORMAT_R8G8B8A8_UNORM,
RGBA_SRGB = VK_FORMAT_R8G8B8A8_SRGB,
D_SF32 = VK_FORMAT_D32_SFLOAT,
}

alias FMT = Format;

// Buffer usage presets; CreateBuffer asserts the type is not None.
enum BufferType : VkBufferUsageFlagBits
{
None = cast(VkBufferUsageFlagBits)0,
Vertex = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
Index = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
Uniform = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
Storage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
Staging = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
BufferView = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
}

alias BT = BufferType;

// Subset of VkImageLayout exposed to callers of the transition helpers.
enum ImageLayout : VkImageLayout
{
Undefined = VK_IMAGE_LAYOUT_UNDEFINED,
General = VK_IMAGE_LAYOUT_GENERAL,
ColorAttach = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
}
|
|
|
|
// GPU image plus the CPU-side state needed to manage it.
struct Image
{
VkImage image;
VmaAllocation alloc;    // VMA allocation backing the image memory
Format format;
VkImageLayout layout;   // last layout this code transitioned the image to
u32 w;
u32 h;
bool depth_image;       // true -> depth aspect, false -> color aspect
ImageUsage usage;
}

// Buffer with an associated texel-buffer view; implicitly converts to Buffer.
struct BufferView
{
Buffer base;
VkBufferView view;

alias base this;
}

// Image with an associated VkImageView; implicitly converts to Image.
struct ImageView
{
Image base;
VkImageView view;

alias base this;
}

// GPU buffer plus CPU-side bookkeeping.
struct Buffer
{
VkBuffer buffer;
VmaAllocation alloc;
u64 size;       // total allocated bytes (size * FRAME_OVERLAP when dynamic; see CreateBuffer)
bool dynamic;   // true -> one sub-range per frame in flight
}

// Placeholder global uniform block.
struct ShaderUniforms
{
f32 placeholder;
}

enum PipelineType : int
{
Graphics,
Compute,
}

alias PT = PipelineType;

// Shader specialization-constant block passed at pipeline creation.
struct Specialization
{
u64 size;            // size in bytes of the data block
SpecEntry[] entries;
void* data;          // must remain valid until the pipeline is created
}

// Inputs for building a graphics pipeline.
struct GfxPipelineInfo
{
u8[] vertex_shader;   // compiled shader bytes
u8[] frag_shader;     // compiled shader bytes
InputRate input_rate;
u32 input_rate_stride;
Attribute[] vertex_attributes;
Specialization vert_spec;
Specialization frag_spec;
bool self_dependency;
PipelineLayout layout;
}

// Inputs for building a compute pipeline.
struct CompPipelineInfo
{
u8[] shader;   // compiled shader bytes
Specialization spec;
PipelineLayout layout;
}

// Initialization milestones, pushed onto Vulkan.cleanup_list as each is
// reached so teardown can run only the steps that actually completed.
enum StepInitialized : u32
{
Renderer = 1,
Instance,
Debug,
Surface,
Device,
Vma,
Buffers,
FrameStructures,
Swapchain,
DrawImages,
DescriptorPools,
Pipelines,
}

alias SI = StepInitialized;

// Descriptor type ids; only the Max sentinel is visible in this chunk.
enum DescType : u32
{
Max,
}

alias DT = DescType;

// Buffer with a persistently mapped, typed view of its memory.
struct MappedBuffer(T)
{
Buffer base;
T[] data;     // mapped slice over the buffer memory (see MapBuffer)
u64 offset;

alias base this;
}
|
|
|
|
// Central renderer state. One instance owns every Vulkan object this module
// creates; `alias queues this` lets callers write vk.gfx_queue etc. directly.
struct Vulkan
{
Arena arena;                          // permanent allocations
Arena[FRAME_OVERLAP] frame_arenas;    // per-frame scratch allocations

u32 frame_index;       // current frame-in-flight slot, in [0, FRAME_OVERLAP)
u32 semaphore_index;

SLList!(SI) cleanup_list;   // completed init steps, for reverse-order teardown

PlatformHandles platform_handles;
u32 window_h, window_w;

VkDebugUtilsMessengerEXT dbg_msg;
VkInstance instance;
VkSurfaceKHR surface;
VkPhysicalDevice physical_device;
VkDevice device;
VmaAllocator vma;
VkSwapchainKHR swapchain;

VkSurfaceFormatKHR surface_format;
VkPresentModeKHR present_mode;
VkExtent3D swapchain_extent;

VkRenderPass render_pass;
VkFramebuffer framebuffer;
ImageView[] present_images;   // one view per swapchain image
u32 image_index;              // swapchain image acquired for the current frame

ImageView draw_image;    // offscreen color target, copied to the swapchain each frame
ImageView depth_image;

VkCommandPool[FRAME_OVERLAP] cmd_pools;
VkCommandBuffer[FRAME_OVERLAP] cmds;

// Dedicated compute submission objects (used by the conversion pass).
VkCommandPool comp_cmd_pool;
VkCommandBuffer comp_cmd;
VkFence comp_fence;

VkSemaphore[] submit_sems;                 // indexed by swapchain image (see SubmitAndPresent)
VkSemaphore[FRAME_OVERLAP] acquire_sems;   // indexed by frame slot
VkFence[FRAME_OVERLAP] render_fences;

// Immediate-submit objects for one-off transfer work.
VkCommandPool imm_pool;
VkCommandBuffer imm_cmd;
VkFence imm_fence;

VkDescriptorPool active_pool;            // pool new descriptor sets allocate from
SLList!(VkDescriptorPool) full_pools;    // exhausted pools retained for cleanup

VkSampler nearest_sampler;
VkSampler oit_sampler;

PipelineHandles[] pipeline_handles;   // indexed by Pipeline id; id 0 is invalid (see PushConstants)
u32 pipeline_count;
VkPipeline[FRAME_OVERLAP] last_pipeline;   // last pipeline bound per frame slot; reset in SubmitAndPresent
DescSetLayout global_set_layout;
DescSet global_set;

MappedBuffer!(u8) transfer_buf;   // persistently mapped staging buffer (see InitBuffers)

QueueInfo queues;

// Compute pipelines that expand 1/2/3-channel pixel data to RGBA.
Pipeline r_to_rgba_pipeline;
Pipeline rg_to_rgba_pipeline;
Pipeline rgb_to_rgba_pipeline;
DescSet conv_desc_set;
VkDescriptorSetLayout conv_desc_layout;
VkPipelineLayout conv_pipeline_layout;

VkPipeline last_comp_pipeline;
bool compute_pass_started;

BufferView a_buffer_view;
ImageView aux_image;

alias queues this;
}

// Push constants for the channel-conversion compute shader.
struct ConvPushConst
{
u32 x;   // image width in pixels
u32 y;   // image height in pixels
}

// Everything needed to bind a pipeline and push constants to it.
struct PipelineHandles
{
VkPipeline handle;
VkPipelineBindPoint type;    // graphics or compute bind point
VkPipelineLayout layout;
Pipeline index;              // this entry's own index in pipeline_handles
}

// Queue family selection; graphics and transfer may share one family.
struct QueueInfo
{
i32 gfx_index, tfer_index;
VkQueue gfx_queue, tfer_queue;
bool single_queue;   // true when gfx and transfer use the same queue
}

// Channel-conversion compute shader embedded at compile time.
// NOTE(review): `convert_shader` is not a visible module-level symbol in this
// chunk (it only appears as an Init parameter) — confirm it is declared
// elsewhere, otherwise this mixin cannot compile.
const u8[] CONVERT_SHADER = mixin(Embed(convert_shader));
|
|
|
|
// Builds the complete renderer: arenas, instance, surface, device, VMA,
// swapchain, frame structures, descriptors, pipelines and buffers.
// Each step runs only if all previous steps succeeded; any failure trips
// the assert at the end.
//
// permanent_mem / frame_mem: byte sizes for the permanent and per-frame arenas.
// NOTE(review): `convert_shader` is not referenced in this body — confirm it
// is consumed elsewhere (e.g. by the Embed mixin for CONVERT_SHADER).
Vulkan
Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem, string convert_shader)
{
bool success = true;

Vulkan vk = {
arena: CreateArena(permanent_mem),
frame_arenas: [
CreateArena(frame_mem),
CreateArena(frame_mem),
],
platform_handles: platform_handles,
};

// Record that renderer-level resources (the arenas) now exist.
Push(&vk, SI.Renderer);

success = LoadGlobalFunctions();

if (success) success = InitInstance(&vk);

if (success)
{
LoadInstanceFunctions(&vk);
EnableVLayers(&vk);
}

if (success) success = InitSurface(&vk);

if (success) success = InitDevice(&vk);

if (success) success = InitVMA(&vk);

if (success) success = CreateSwapchain(&vk);

if (success) success = CreateDrawImages(&vk);

if (success) success = InitFrameStructures(&vk);

// NOTE(review): the remaining steps do not update `success` — presumably
// they cannot fail; confirm their return types.
if (success) InitDescriptors(&vk);

if (success) success = InitGlobalDescSet(&vk);

if (success) InitPipelines(&vk);

if (success) InitBuffers(&vk);

if (success) InitConversionPipeline(&vk);

if (success) InitFramebufferAndRenderPass(&vk);

assert(success, "Error initializing vulkan");

return vk;
}
|
|
|
|
// Reserves storage for every pipeline the renderer will create.
// Slot 0 is skipped so that a Pipeline id of 0 can act as an invalid
// sentinel (PushConstants asserts id > 0).
void
InitPipelines(Vulkan* vk)
{
vk.pipeline_handles = AllocArray!(PipelineHandles)(&vk.arena, 128);
vk.pipeline_count++;
}
|
|
|
|
// Creates a descriptor set layout from the given bindings.
// Aborts via VkCheckA on failure; returns the new layout handle.
DescSetLayout
CreateDescSetLayout(Vulkan* vk, DescLayoutBinding[] bindings)
{
DescSetLayout layout;

VkDescriptorSetLayoutCreateInfo info = {
sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
bindingCount: cast(u32)bindings.length,
pBindings: bindings.ptr,
};

VkResult res = vkCreateDescriptorSetLayout(vk.device, &info, null, &layout);
VkCheckA("vkCreateDescriptorSetLayout failure", res);

return layout;
}
|
|
|
|
// Allocates one descriptor set with the given layout from the active pool.
// If the active pool is exhausted or fragmented, swaps in a fresh pool and
// retries once.
//
// Fix: the original only ran VkCheckA inside the retry branch, so a
// first-attempt failure of any other kind (e.g. device out of memory) was
// silently ignored and a garbage handle returned. The check now covers
// every path.
DescSet
AllocDescSet(Vulkan* vk, DescSetLayout layout, u32 dynamic_count = 0)
{
VkDescriptorSetAllocateInfo alloc_info = {
sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
descriptorSetCount: 1,
pSetLayouts: &layout,
descriptorPool: vk.active_pool,
};

DescSet set = {
dynamic_count: dynamic_count,
};
VkResult result = vkAllocateDescriptorSets(vk.device, &alloc_info, &set.handle);
if (result == VK_ERROR_OUT_OF_POOL_MEMORY || result == VK_ERROR_FRAGMENTED_POOL)
{
// Retire the exhausted pool and allocate from a fresh one.
PushDescriptorPool(vk);

alloc_info.descriptorPool = vk.active_pool;

result = vkAllocateDescriptorSets(vk.device, &alloc_info, &set.handle);
}

VkCheckA("vkAllocateDescriptorSets failure", result); // TODO: look more into how to handle this

return set;
}
|
|
|
|
// Creates a pipeline layout whose set 0 is always the renderer's global
// descriptor layout, followed by the caller's layout(s).
//
// layouts: a single DescSetLayout or a slice of them (occupying sets 1..N).
// push_const_size: push constant block size in bytes; 0 means no range.
// compute: selects compute vs vertex|fragment stage flags for the range.
//
// Fix: the slice branch looped `foreach(i; 1 .. layouts.length)`, which never
// copied the final caller layout — desc_layouts[$-1] stayed null and the last
// set layout was silently dropped. The loop now copies all of them.
PipelineLayout
CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute = false) if (is(T: DescSetLayout) || is(T: DescSetLayout[]))
{
VkShaderStageFlagBits stage = (compute ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);

DescSetLayout[] desc_layouts;

static if (is(T: DescSetLayout))
{
// Global layout at set 0, the caller's single layout at set 1.
desc_layouts = AllocArray!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], 2);
desc_layouts[0] = vk.global_set_layout;
desc_layouts[1] = layouts;
}
else static if (is(T: DescSetLayout[]))
{
// Global layout at set 0, caller layouts at sets 1..layouts.length.
desc_layouts = AllocArray!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], layouts.length + 1);
desc_layouts[0] = vk.global_set_layout;

foreach(i; 0 .. layouts.length)
{
desc_layouts[i + 1] = layouts[i];
}
}

// Single push-constant range covering the whole block.
VkPushConstantRange const_range = {
offset: 0,
size: push_const_size,
stageFlags: stage,
};

VkPipelineLayoutCreateInfo layout_info = {
sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
setLayoutCount: cast(u32)desc_layouts.length,
pSetLayouts: desc_layouts.ptr,
pushConstantRangeCount: (push_const_size > 0 ? 1 : 0),
pPushConstantRanges: (push_const_size > 0 ? &const_range : null),
};

PipelineLayout layout;
VkResult result = vkCreatePipelineLayout(vk.device, &layout_info, null, &layout);
VkCheckA("CreatePipelineLayout failure", result);

return layout;
}
|
|
|
|
// Builds the three compute pipelines that expand 1-, 2- and 3-channel pixel
// data into RGBA, all sharing one descriptor layout, one pipeline layout and
// one specialization constant (the channel count).
void
InitConversionPipeline(Vulkan* vk)
{
Push(vk, SI.Pipelines);

// Binding 0: destination storage image; binding 1: source pixel buffer.
VkDescriptorSetLayoutBinding[2] layout_bindings = [
{ binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
{ binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
];

vk.conv_desc_layout = CreateDescSetLayout(vk, layout_bindings);
vk.conv_desc_set = AllocDescSet(vk, vk.conv_desc_layout);
vk.conv_pipeline_layout = CreatePipelineLayout(vk, vk.conv_desc_layout, ConvPushConst.sizeof, true);

// Specialization constant 0 = source channel count. `spec.data` points at
// this local, so mutating `channels` between the CreateComputePipeline
// calls below changes the constant baked into each pipeline.
u32 channels = 1;
SpecEntry[1] entries = [
{
constantID: 0,
size: u32.sizeof,
offset: 0,
}
];
CompPipelineInfo conv_info = {
shader: CONVERT_SHADER,
layout: vk.conv_pipeline_layout,
spec: {
data: &channels,
size: u32.sizeof,
entries: entries,
},
};

vk.r_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);

channels = 2;
vk.rg_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);

channels = 3;
vk.rgb_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);
}
|
|
|
|
// Creates a VMA-backed buffer.
//
// host_visible: true -> host-visible+coherent memory (cached preferred);
//               false -> device-local, with TRANSFER_DST added so staging
//               copies can fill it.
// dynamic: true -> allocates FRAME_OVERLAP copies back-to-back so each frame
//          in flight can use its own sub-range.
// Shared between the graphics and transfer queue families (CONCURRENT) when
// they differ. Aborts via assert on allocation failure.
void
CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visible, bool dynamic = false)
{
assert(type != BT.None, "CreateBuffer failure: type is None");

u64 buffer_size = (dynamic ? size * FRAME_OVERLAP : size);

VkBufferCreateInfo buffer_info = {
sType: VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
usage: type,
size: buffer_size,
};

// MAPPED_BIT keeps the allocation persistently mapped when host-visible.
VmaAllocationCreateInfo alloc_info = {
usage: VMA_MEMORY_USAGE_UNKNOWN,
flags: VMA_ALLOCATION_CREATE_MAPPED_BIT,
};

if (host_visible)
{
alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
alloc_info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
}
else
{
// Device-local buffers are filled via staging copies.
buffer_info.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
alloc_info.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
}

// Concurrent sharing only when graphics and transfer families differ.
u32[2] indices = [vk.queues.gfx_index, vk.queues.tfer_index];
if (vk.queues.gfx_index != vk.queues.tfer_index)
{
buffer_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
buffer_info.queueFamilyIndexCount = 2;
buffer_info.pQueueFamilyIndices = indices.ptr;
}

VmaAllocationInfo vma_info;
VkResult result = vmaCreateBuffer(vk.vma, &buffer_info, &alloc_info, &buf.buffer, &buf.alloc, &vma_info);
// TODO: handle errors here then reallocate buffer
assert(VkCheck("CreateBuffer failure: vmaCreateBuffer error", result), "CreateBuffer failure");

buf.size = buffer_size;
buf.dynamic = dynamic;
}
|
|
|
|
// Creates the renderer-owned buffers: currently just the persistently
// mapped 64 MiB staging buffer used for all CPU -> GPU transfers.
void
InitBuffers(Vulkan* vk)
{
Push(vk, SI.Buffers);

u64 staging_bytes = MB(64);
vk.transfer_buf = CreateMappedBuffer!(u8)(vk, BT.Staging, staging_bytes);
}
|
|
|
|
// Binds the index buffer (32-bit indices) and vertex buffer 0 on the
// current frame's command buffer, both at offset 0.
void
BindBuffers(Vulkan* vk, Buffer* index_buffer, Buffer* vertex_buffer)
{
VkCommandBuffer cmd = vk.cmds[vk.frame_index];
VkDeviceSize zero_offset = 0;

vkCmdBindIndexBuffer(cmd, index_buffer.buffer, 0, VK_INDEX_TYPE_UINT32);
vkCmdBindVertexBuffers(cmd, 0, 1, &vertex_buffer.buffer, &zero_offset);
}
|
|
|
|
// Creates a host-visible buffer sized for `count` elements of T and maps it,
// returning the buffer together with its typed mapped slice.
MappedBuffer!(T)
CreateMappedBuffer(T)(Vulkan* vk, BufferType type, u64 count)
{
MappedBuffer!(T) mapped;

u64 byte_size = T.sizeof * count;
CreateBuffer(vk, &mapped.base, type, byte_size, true);
mapped.data = MapBuffer!(T)(vk, &mapped.base, count);

return mapped;
}
|
|
|
|
// Maps the buffer's memory and returns it as a slice of `count` Ts.
// The mapping stays valid until explicitly unmapped.
//
// Fix: the original ignored vmaMapMemory's VkResult, so a failed map would
// slice an undefined pointer. The result is now checked with VkCheckA,
// matching the rest of the file.
T[]
MapBuffer(T)(Vulkan* vk, Buffer* buffer, u64 count)
{
void* ptr;
VkResult result = vmaMapMemory(vk.vma, buffer.alloc, &ptr);
VkCheckA("MapBuffer failure: vmaMapMemory error", result);
return (cast(T*)ptr)[0 .. count];
}
|
|
|
|
// Returns the swapchain image acquired for the current frame
// (image_index is set by the latest vkAcquireNextImageKHR in BeginFrame).
VkImage
CurrentImage(Vulkan* vk)
{
ImageView* present = &vk.present_images[vk.image_index];
return present.image;
}
|
|
|
|
// Starts a frame: waits for this frame slot's previous submission to retire,
// acquires the next swapchain image, and begins recording the frame's
// command buffer.
void
BeginFrame(Vulkan* vk)
{
// TODO: move vkWaitForFences so it no longer holds up the frame, will need to change how fences are handled in regards to images though
VkResult result = vkWaitForFences(vk.device, 1, vk.render_fences.ptr + vk.frame_index, VK_TRUE, 1000000000);
VkCheckA("BeginFrame failure: vkWaitForFences error", result);

result = vkResetFences(vk.device, 1, vk.render_fences.ptr + vk.frame_index);
VkCheckA("BeginFrame failure: vkResetFences error", result);

result = vkAcquireNextImageKHR(vk.device, vk.swapchain, 1000000000, vk.acquire_sems[vk.frame_index], null, &vk.image_index);
if (result == VK_ERROR_OUT_OF_DATE_KHR)
{
// NOTE(review): after recreation the function carries on recording with
// the image_index/semaphore state of the *failed* acquire — confirm
// RecreateSwapchain re-acquires or that this frame is safely discarded.
RecreateSwapchain(vk);
}
else if (result != VK_SUBOPTIMAL_KHR)
{
// SUBOPTIMAL still acquired an image, so it is deliberately not
// treated as an error here.
VkCheckA("BeginFrame failure: vkAcquireNextImageKHR error", result);
}

result = vkResetCommandBuffer(vk.cmds[vk.frame_index], 0);
VkCheckA("BeginFrame failure: vkResetCommandBuffer failure", result);

VkCommandBufferBeginInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
};

result = vkBeginCommandBuffer(vk.cmds[vk.frame_index], &cmd_info);
VkCheckA("BeginFrame failure: vkBeginCommandBuffer error", result);
}
|
|
|
|
// Transitions the draw/depth targets and current swapchain image into their
// working layouts, then begins the render pass with both clear values.
void
BeginRendering(Vulkan* vk)
{
Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
Transition(vk.cmds[vk.frame_index], &vk.depth_image, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL);

// Swapchain image becomes a transfer destination; it receives the draw
// image's contents at the end of the frame (see SubmitAndPresent).
VkImage image = CurrentImage(vk);
Transition(vk.cmds[vk.frame_index], image, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

// Clear values per attachment: opaque black, then transparent black.
VkClearValue[2] clear_color = [
{
color: {
float32: [0.0, 0.0, 0.0, 1.0],
},
},
{
color: {
float32: [0.0, 0.0, 0.0, 0.0],
},
},
];

VkRenderPassBeginInfo pass_info = {
sType: VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
renderPass: vk.render_pass,
framebuffer: vk.framebuffer,
renderArea: {
offset: {
x: 0,
y: 0,
},
extent: {
width: vk.swapchain_extent.width,
height: vk.swapchain_extent.height,
},
},
clearValueCount: cast(u32)clear_color.length,
pClearValues: clear_color.ptr,
};

vkCmdBeginRenderPass(vk.cmds[vk.frame_index], &pass_info, VK_SUBPASS_CONTENTS_INLINE);
}
|
|
|
|
// Recreates `view` at the current swapchain size (preserving its format,
// usage and depth flag) if its dimensions no longer match.
// NOTE(review): GetExtent is declared to return u32[2] but is assigned to a
// UVec2 with .x/.y access here — confirm UVec2 is compatible with u32[2].
pragma(inline): void
ResizeDrawImageIfNeeded(Vulkan* vk, ImageView* view)
{
UVec2 ext = GetExtent(vk);

if (view.w != ext.x || view.h != ext.y)
{
Destroy(vk, view);
CreateImageView(vk, view, ext.x, ext.y, view.format, view.usage, view.depth_image);
}
}
|
|
|
|
// Returns the swapchain's current {width, height} in pixels.
pragma(inline): u32[2]
GetExtent(Vulkan* vk)
{
u32[2] dims = [vk.swapchain_extent.width, vk.swapchain_extent.height];
return dims;
}
|
|
|
|
// Returns the swapchain's aspect ratio (width / height).
pragma(inline): f32
GetAspect(Vulkan* vk)
{
f32 w = cast(f32)(vk.swapchain_extent.width);
f32 h = cast(f32)(vk.swapchain_extent.height);
return w / h;
}
|
|
|
|
// Transitions an auxiliary image to GENERAL on the current frame's command
// buffer so shaders can read/write it as a storage image.
void
PrepAuxImage(Vulkan* vk, ImageView* view)
{
Transition(vk.cmds[vk.frame_index], view, VK_IMAGE_LAYOUT_GENERAL);
}
|
|
|
|
// Transitions the offscreen draw image to GENERAL on the current frame's
// command buffer, readying it for compute-shader access.
void
PrepComputeDrawImage(Vulkan* vk)
{
Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_GENERAL);
}
|
|
|
|
// Ends the render pass begun by BeginRendering on the current frame's
// command buffer.
void
FinishRendering(Vulkan* vk)
{
vkCmdEndRenderPass(vk.cmds[vk.frame_index]);
}
|
|
|
|
// Copies the draw image into the acquired swapchain image, submits the
// frame's command buffer (waiting on the acquire semaphore, signaling the
// per-image submit semaphore and the frame fence), then presents.
void
SubmitAndPresent(Vulkan* vk)
{
// Runs after everything below, even on early exit: drop the cached
// pipeline for this slot and advance to the next frame in flight.
scope(exit)
{
vk.last_pipeline[vk.frame_index] = null;
vk.frame_index = (vk.frame_index + 1) % FRAME_OVERLAP;
}

VkImage image = CurrentImage(vk);
VkSemaphore acquire_sem = vk.acquire_sems[vk.frame_index];
// Submit semaphores are indexed by swapchain image, not frame slot.
VkSemaphore submit_sem = vk.submit_sems[vk.image_index];

// Draw image becomes the copy source for the swapchain blit.
Transition(vk.cmds[vk.frame_index], &vk.draw_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

VkExtent2D extent = {
width: vk.swapchain_extent.width,
height: vk.swapchain_extent.height,
};

// TODO: Find out how to copy from same dimension images (pretty sure its not blitting)
Copy(vk.cmds[vk.frame_index], &vk.draw_image.base, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, extent, extent);

Transition(vk.cmds[vk.frame_index], image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);

VkResult result = vkEndCommandBuffer(vk.cmds[vk.frame_index]);
VkCheckA("FinishFrame failure: vkEndCommandBuffer error", result);

VkCommandBufferSubmitInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
commandBuffer: vk.cmds[vk.frame_index],
};

// Wait for the image acquire before color-attachment output.
// NOTE(review): `value` is ignored for binary semaphores — confirm these
// are not timeline semaphores.
VkSemaphoreSubmitInfo wait_info = {
sType: VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
semaphore: acquire_sem,
stageMask: VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
value: 1,
};

// Signal the per-image semaphore once all graphics work finishes;
// present waits on it below.
VkSemaphoreSubmitInfo signal_info = {
sType: VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
semaphore: submit_sem,
stageMask: VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT,
value: 1,
};

VkSubmitInfo2 submit_info = {
sType: VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
waitSemaphoreInfoCount: 1,
pWaitSemaphoreInfos: &wait_info,
signalSemaphoreInfoCount: 1,
pSignalSemaphoreInfos: &signal_info,
commandBufferInfoCount: 1,
pCommandBufferInfos: &cmd_info,
};

// The frame fence gates reuse of this slot's command buffer (BeginFrame).
result = vkQueueSubmit2(vk.queues.gfx_queue, 1, &submit_info, vk.render_fences[vk.frame_index]);
VkCheckA("FinishFrame failure: vkQueueSubmit2 error", result);

VkPresentInfoKHR present_info = {
sType: VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
swapchainCount: 1,
pSwapchains: &vk.swapchain,
waitSemaphoreCount: 1,
pWaitSemaphores: &submit_sem,
pImageIndices: &vk.image_index,
};

result = vkQueuePresentKHR(vk.queues.gfx_queue, &present_info);
if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR)
{
RecreateSwapchain(vk);
}
else
{
VkCheckA("FinishFrame failure: vkQueuePresentKHR failure", result);
}
}
|
|
|
|
// Issues a non-indexed draw on the current frame's command buffer.
// NOTE(review): `index_count` is forwarded as vkCmdDraw's *vertexCount* —
// the parameter name is misleading; confirm callers pass vertex counts.
void
Draw(Vulkan* vk, u32 index_count, u32 instance_count)
{
vkCmdDraw(vk.cmds[vk.frame_index], index_count, instance_count, 0, 0);
}
|
|
|
|
// Issues an indexed draw on the current frame's command buffer, starting at
// `index_offset` in the bound index buffer (vertex offset and first instance
// are always 0).
void
DrawIndexed(Vulkan* vk, u32 index_count, u32 instance_count, u32 index_offset)
{
vkCmdDrawIndexed(vk.cmds[vk.frame_index], index_count, instance_count, index_offset, 0, 0);
}
|
|
|
|
// Prepares the immediate-submit command buffer for recording: waits for the
// previous immediate submission to retire, resets fence and command buffer,
// and begins a one-time-submit recording. Returns false on the first
// Vulkan error.
bool
ImmSubmitStart(Vulkan* vk)
{
VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999);
if (!VkCheck("ImmSubmit failure: vkWaitForFences error", result))
return false;

result = vkResetFences(vk.device, 1, &vk.imm_fence);
if (!VkCheck("ImmSubmit failure: vkResetFences error", result))
return false;

result = vkResetCommandBuffer(vk.imm_cmd, 0);
if (!VkCheck("ImmSubmit failure: vkResetCommandBuffer error", result))
return false;

VkCommandBufferBeginInfo begin_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
};

result = vkBeginCommandBuffer(vk.imm_cmd, &begin_info);
return VkCheck("ImmSubmit failure: vkBeginCommandBuffer error", result);
}
|
|
|
|
// Submits the recorded immediate command buffer on the transfer queue,
// signaling imm_fence on completion. Returns false on submission error.
bool
ImmSubmitFinish(Vulkan* vk)
{
VkCommandBufferSubmitInfo buf_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
commandBuffer: vk.imm_cmd,
};

VkSubmitInfo2 info = {
sType: VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
commandBufferInfoCount: 1,
pCommandBufferInfos: &buf_info,
};

VkResult submit_result = vkQueueSubmit2(vk.tfer_queue, 1, &info, vk.imm_fence);
return VkCheck("ImmSubmit failure: vkQueueSubmit2 error", submit_result);
}
|
|
|
|
// Records `fn` (an image-copy callback) into the immediate command buffer
// and submits it on the transfer queue. Returns false on any error.
bool
ImmSubmit(Vulkan* vk, Image* image, VkBufferImageCopy copy, void function(Vulkan*, Image*, VkBufferImageCopy) fn)
{
if (!ImmSubmitStart(vk))
return false;

fn(vk, image, copy);

VkResult end_result = vkEndCommandBuffer(vk.imm_cmd);
if (!VkCheck("ImmSubmit failure: vkEndCommandBuffer error", end_result))
return false;

return ImmSubmitFinish(vk);
}
|
|
|
|
// Records `fn` (a buffer-copy callback) into the immediate command buffer
// and submits it on the transfer queue. Returns false on any error.
bool
ImmSubmit(Vulkan* vk, Buffer* buf, VkBufferCopy copy, void function(Vulkan*, Buffer*, VkBufferCopy) fn)
{
if (!ImmSubmitStart(vk))
return false;

fn(vk, buf, copy);

VkResult end_result = vkEndCommandBuffer(vk.imm_cmd);
if (!VkCheck("ImmSubmit failure: vkEndCommandBuffer error", end_result))
return false;

return ImmSubmitFinish(vk);
}
|
|
|
|
|
|
|
|
// Stub: asset transfer not yet implemented; always reports success.
bool
TransferAssets(Vulkan* vk)
{
return true;
}
|
|
|
|
// Blocks until the last immediate-submit transfer (imm_fence) completes.
void
WaitForTransfers(Vulkan* vk)
{
vkWaitForFences(vk.device, 1, &vk.imm_fence, VK_TRUE, u64.max);
}
|
|
|
|
// Creates an RGBA texture from raw pixel data with `ch` channels per pixel.
// 4-channel data is uploaded directly; 1/2/3-channel data is uploaded to a
// storage buffer, expanded to RGBA by the matching conversion compute
// pipeline, then copied into the final texture.
pragma(inline): void
CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, u32 ch, u8[] data)
{
CreateImageView(vk, view, w, h, FMT.RGBA_UNORM, IU.Texture);

if (ch == 4)
{
// Already RGBA: plain staging upload.
assert(Transfer(vk, view, data, w, h), "CreateImageView failure: Image Transfer error");
}
else
{
// Upload the packed source pixels into a storage buffer the compute
// shader reads from.
Buffer buf;
CreateBuffer(vk, &buf, BT.Storage, w * h * ch, false);
assert(Transfer(vk, &buf, data), "CreateImageView failure: Buffer Transfer error");

// Intermediate RGBA image the compute shader writes into.
ImageView conv_view;
CreateImageView(vk, &conv_view, w, h, FMT.RGBA_F32, IU.Convert);

WriteConvDescriptor(vk, &buf);
WriteConvDescriptor(vk, &conv_view);

BeginComputePass(vk);

// Pick the pipeline specialized for the source channel count.
Pipeline pipeline = ch == 1 ? vk.r_to_rgba_pipeline :
ch == 2 ? vk.rg_to_rgba_pipeline :
vk.rgb_to_rgba_pipeline;

Bind(vk, pipeline, vk.conv_desc_set, true);

// Shader needs the image dimensions to index the packed buffer.
ConvPushConst pc = {
x: w,
y: h,
};

vkCmdPushConstants(
vk.comp_cmd,
vk.conv_pipeline_layout,
VK_SHADER_STAGE_COMPUTE_BIT,
0,
ConvPushConst.sizeof,
&pc
);

// Storage-image writes require GENERAL layout before dispatch.
Transition(vk.comp_cmd, &conv_view, VK_IMAGE_LAYOUT_GENERAL);

Dispatch(vk, vk.comp_cmd);

// Copy the converted image into the final sampled texture.
Transition(vk.comp_cmd, view, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

VkExtent2D extent = { width: w, height: h };
Copy(vk.comp_cmd, &conv_view.base, &view.base, extent, extent);

Transition(vk.comp_cmd, view, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

FinishComputePass(vk);

// Block until the GPU is done before destroying the temporaries.
vkWaitForFences(vk.device, 1, &vk.comp_fence, VK_TRUE, u64.max);

vkQueueWaitIdle(vk.tfer_queue);

Destroy(vk, &buf);

Destroy(vk, &conv_view);
}

}
|
|
|
|
// Prepares the dedicated compute command buffer for recording: waits for the
// previous compute submission, resets its fence and command buffer, and
// begins a one-time-submit recording.
pragma(inline): void
BeginComputePass(Vulkan* vk)
{
VkResult result = vkWaitForFences(vk.device, 1, &vk.comp_fence, VK_TRUE, 1000000000);
VkCheckA("BeginComputePass failure: vkWaitForFences error", result);

result = vkResetFences(vk.device, 1, &vk.comp_fence);
VkCheckA("BeginComputepass failure: vkResetFences error", result);

result = vkResetCommandBuffer(vk.comp_cmd, 0);
VkCheckA("BeginComputePass failure: vkResetCommandBuffer error", result);

VkCommandBufferBeginInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
flags: VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
};

result = vkBeginCommandBuffer(vk.comp_cmd, &cmd_info);
VkCheckA("BeginComputePass failure: vkBeginCommandBuffer error", result);
}
|
|
|
|
// Ends the compute command buffer and submits it on the graphics queue,
// signaling comp_fence on completion (waited on by BeginComputePass and
// texture conversion).
pragma(inline): void
FinishComputePass(Vulkan* vk)
{
VkResult result = vkEndCommandBuffer(vk.comp_cmd);
VkCheckA("FinishComputePass failure: vkEndCommandBuffer error", result);

VkCommandBufferSubmitInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
commandBuffer: vk.comp_cmd,
};

VkSubmitInfo2 submit_info = {
sType: VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
commandBufferInfoCount: 1,
pCommandBufferInfos: &cmd_info,
};

result = vkQueueSubmit2(vk.gfx_queue, 1, &submit_info, vk.comp_fence);
VkCheckA("FinishComputePass failure: vkQueueSubmit2 error", result);
}
|
|
|
|
// Creates a device-local texel buffer of `size` bytes plus a VkBufferView
// over its full range in the given format.
pragma(inline): void
CreateBufferView(Vulkan* vk, BufferView* view, u64 size, Format format)
{
CreateBuffer(vk, &view.base, BT.BufferView, size, false);

VkBufferViewCreateInfo info = {
sType: VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
buffer: view.buffer,
format: format,
range: size,
};

VkResult result = vkCreateBufferView(vk.device, &info, null, &view.view);
VkCheckA("CreateBufferView failure: vkCreateBufferView failed", result);

// Redundant with CreateBuffer's assignment, but kept for clarity.
view.size = size;
}
|
|
|
|
// Creates a device-local 2D image (1 mip, 1 layer, optimal tiling) and a
// matching VkImageView, then records the image's bookkeeping in `view`.
// Shared CONCURRENT between graphics and transfer families when they differ.
// Aborts via assert on creation failure.
pragma(inline): void
CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format, ImageUsage usage, bool depth_image = false)
{
VmaAllocationCreateInfo alloc_info = {
usage: VMA_MEMORY_USAGE_GPU_ONLY,
requiredFlags: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
};

VkImageCreateInfo image_info = {
sType: VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
imageType: VK_IMAGE_TYPE_2D,
mipLevels: 1,
arrayLayers: 1,
format: format,
tiling: VK_IMAGE_TILING_OPTIMAL,
initialLayout: VK_IMAGE_LAYOUT_UNDEFINED,
usage: usage,
samples: VK_SAMPLE_COUNT_1_BIT,
extent: {
width: w,
height: h,
depth: 1,
},
};

// Concurrent sharing only when graphics and transfer families differ.
u32[2] indices = [vk.gfx_index, vk.tfer_index];
if (vk.gfx_index != vk.tfer_index)
{
image_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
image_info.queueFamilyIndexCount = 2;
image_info.pQueueFamilyIndices = indices.ptr;
}

VkResult result = vmaCreateImage(vk.vma, &image_info, &alloc_info, &view.image, &view.alloc, null);
// TODO: handle errors and realloc
assert(VkCheck("CreateImageView failure: vmaCreateImage error", result), "CreateImageView failure");

VkImageViewCreateInfo view_info = {
sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
image: view.image,
viewType: VK_IMAGE_VIEW_TYPE_2D,
format: format,
subresourceRange: {
aspectMask: (depth_image ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT),
levelCount: 1,
layerCount: 1,
},
};

result = vkCreateImageView(vk.device, &view_info, null, &view.view);
// TODO: also handle here
assert(VkCheck("CreateImageView failure: vkCreateImageView error", result), "CreateImageView failure");

// Record CPU-side state used by the transition/resize helpers.
view.layout = VK_IMAGE_LAYOUT_UNDEFINED;
view.format = format;
view.w = w;
view.h = h;
view.depth_image = depth_image;
view.usage = usage;
}
|
|
|
|
// Uploads `*pc` as the push-constant block for `pipeline_id` on this frame's
// command buffer, starting at offset 0. Graphics pipelines receive the
// constants in both vertex and fragment stages; compute pipelines in the
// compute stage.
void
PushConstants(T)(Vulkan* vk, Pipeline pipeline_id, T* pc)
{
    assert(pipeline_id > 0, "PushConstants pipeline_id == 0");
    PipelineHandles* handles = vk.pipeline_handles.ptr + pipeline_id;

    VkShaderStageFlags stages;
    if (handles.type == VK_PIPELINE_BIND_POINT_GRAPHICS)
    {
        stages = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
    }
    else
    {
        stages = VK_SHADER_STAGE_COMPUTE_BIT;
    }

    vkCmdPushConstants(vk.cmds[vk.frame_index], handles.layout, stages, 0, T.sizeof, pc);
}
|
|
|
|
// Records a by-region fragment-shader -> fragment-shader memory barrier
// (shader read/write to shader read/write) on this frame's command buffer.
void
ImageBarrier(Vulkan* vk)
{
    VkMemoryBarrier2 mem_barrier;
    mem_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2;
    mem_barrier.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    mem_barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
    mem_barrier.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    mem_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;

    VkDependencyInfo dep;
    dep.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
    dep.memoryBarrierCount = 1;
    dep.pMemoryBarriers = &mem_barrier;

    vkCmdPipelineBarrier2(vk.cmds[vk.frame_index], &dep);
}
|
|
|
|
// Reinterprets `data` as raw bytes and forwards to the u8[] overload.
bool
Transfer(T)(Vulkan* vk, Buffer* buf, T[] data)
{
    return Transfer(vk, buf, (cast(u8*)data.ptr)[0 .. data.length * T.sizeof]);
}
|
|
|
|
// Copies `data` into `buf` through the staging (transfer) buffer, splitting
// the upload into staging-buffer-sized chunks. Blocks until all submissions
// finish. Returns false if waiting on the transfer fence or a submission
// fails.
bool
Transfer(Vulkan* vk, Buffer* buf, u8[] data)
{
    bool success = TransferReady(vk);

    u64 copied = 0;
    while(copied != data.length && success)
    {
        if (copied != 0)
        {
            // Wait for the previous chunk's submission before reusing the
            // staging buffer.
            success = TransferReady(vk);
            if (!success)
            {
                break;
            }
        }

        u64 transfer_length = cast(u64)vk.transfer_buf.data.length;
        u64 data_length = cast(u64)data.length - copied;
        u64 copy_length = transfer_length > data_length ? data_length : transfer_length;

        // BUG FIX: the source slice must start at `copied`; the old
        // `data[copied .. copy_length]` was a wrong (or invalid) range on
        // every chunk after the first.
        vk.transfer_buf.data[0 .. copy_length] = data[copied .. copied + copy_length];

        auto fn = function(Vulkan* vk, Buffer* buf, VkBufferCopy copy)
        {
            vkCmdCopyBuffer(vk.imm_cmd, vk.transfer_buf.buffer, buf.buffer, 1, &copy);
        };

        VkBufferCopy copy = {
            srcOffset: 0,
            dstOffset: copied,
            size: copy_length,
        };

        success = ImmSubmit(vk, buf, copy, fn);

        copied += copy_length;
    }

    WaitForTransfers(vk);

    return success;
}
|
|
|
|
// Copies the single structure `*ptr` into `buf` via the staging buffer.
// The structure must fit into the staging buffer in one chunk.
// Returns false if waiting on the transfer fence or the submission fails.
bool
Transfer(T)(Vulkan* vk, Buffer* buf, T* ptr)
{
    // BUG FIX: a structure exactly as large as the staging buffer is fine, so
    // the bound is <= rather than <.
    assert(T.sizeof <= vk.transfer_buf.data.length, "Transfer failure: structure size is too large");

    bool success = TransferReady(vk);
    if (success)
    {
        memcpy(vk.transfer_buf.data.ptr, ptr, T.sizeof);

        auto fn = function(Vulkan* vk, Buffer* buf, VkBufferCopy copy)
        {
            vkCmdCopyBuffer(vk.imm_cmd, vk.transfer_buf.buffer, buf.buffer, 1, &copy);
        };

        VkBufferCopy copy = {
            srcOffset: 0,
            dstOffset: 0,
            size: T.sizeof,
        };

        success = ImmSubmit(vk, buf, copy, fn);
    }

    WaitForTransfers(vk);

    return success;
}
|
|
|
|
// Needs to be done before writing to the transfer buffer as otherwise it'll overwrite it while previous transfers are occurring
// Blocks on the immediate-submit fence for up to 999999999 ns (~1 second).
// Returns false on timeout or error (VK_TIMEOUT is not VK_SUCCESS, so VkCheck
// logs it and reports failure).
bool
TransferReady(Vulkan* vk)
{
    VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999);
    return VkCheck("Transfer failure: vkWaitForFences error", result);
}
|
|
|
|
// Uploads pixel data into the image backing `view`; forwards to the Image
// overload using the view's embedded base image.
pragma(inline): bool
Transfer(Vulkan* vk, ImageView* view, u8[] data, u32 w, u32 h)
{
    return Transfer(vk, &view.base, data, w, h);
}
|
|
|
|
// Uploads pixel data into `image` via the staging buffer, transitioning the
// image to TRANSFER_DST for the copy and to SHADER_READ_ONLY afterwards.
// Blocks until all submissions finish; returns false on fence or submit
// failure.
// BUG FIXES vs. the previous version: the immediate fence is now awaited
// before (re)writing the staging buffer (see the note on TransferReady), and
// the source slice starts at `copied` instead of the wrong
// `data[copied .. copy_length]` range.
bool
Transfer(Vulkan* vk, Image* image, u8[] data, u32 w, u32 h)
{
    bool success = TransferReady(vk);

    u64 copied = 0;
    while(copied != data.length && success)
    {
        if (copied != 0)
        {
            success = TransferReady(vk);
            if (!success)
            {
                break;
            }
        }

        u64 transfer_length = cast(u64)vk.transfer_buf.data.length;
        u64 data_length = cast(u64)data.length - copied;
        u64 copy_length = transfer_length > data_length ? data_length : transfer_length;

        vk.transfer_buf.data[0 .. copy_length] = data[copied .. copied + copy_length];

        auto fn = function(Vulkan* vk, Image* image, VkBufferImageCopy copy)
        {
            Transition(vk.imm_cmd, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

            vkCmdCopyBufferToImage(vk.imm_cmd, vk.transfer_buf.buffer, image.image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy);

            Transition(vk.imm_cmd, image, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
        };

        // NOTE(review): every chunk records a copy of the full w*h extent at a
        // different bufferOffset, which is only correct when the whole image
        // fits in one staging chunk — TODO confirm multi-chunk image uploads.
        VkBufferImageCopy copy = {
            bufferRowLength: w,
            bufferImageHeight: h,
            imageSubresource: {
                aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
                layerCount: 1,
            },
            imageExtent: {
                width: w,
                height: h,
                depth: 1,
            },
            bufferOffset: copied,
        };

        success = ImmSubmit(vk, image, copy, fn);

        copied += copy_length;
    }

    WaitForTransfers(vk);

    return success;
}
|
|
|
|
// Blits `src` into `dst` using each image's currently-tracked layout.
pragma(inline): void
Copy(VkCommandBuffer cmd, Image* src, Image* dst, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    Copy(cmd, src.image, dst.image, src.layout, dst.layout, src_ext, dst_ext);
}
|
|
|
|
// Blits a tracked Image into a raw VkImage (e.g. a swapchain image) whose
// layout the caller supplies explicitly.
pragma(inline): void
Copy(VkCommandBuffer cmd, Image* src, VkImage dst, VkImageLayout dst_layout, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    Copy(cmd, src.image, dst, src.layout, dst_layout, src_ext, dst_ext);
}
|
|
|
|
// Records a scaling blit (linear filter) of the full `src_ext` region of
// `src` (color aspect, mip 0, layer 0) onto the full `dst_ext` region of
// `dst`. Layouts are passed through unchanged; callers are responsible for
// having transitioned both images appropriately.
pragma(inline): void
Copy(VkCommandBuffer cmd, VkImage src, VkImage dst, VkImageLayout src_layout, VkImageLayout dst_layout, VkExtent2D src_ext, VkExtent2D dst_ext)
{
    VkImageBlit blit = {
        // Offsets span the whole extent; z runs 0..1 for a 2D image.
        srcOffsets: [
            { x: 0, y: 0 },
            { x: cast(i32)src_ext.width, y: cast(i32)src_ext.height, z: 1 },
        ],
        dstOffsets: [
            { x: 0, y: 0 },
            { x: cast(i32)dst_ext.width, y: cast(i32)dst_ext.height, z: 1 },
        ],
        srcSubresource: {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            baseArrayLayer: 0,
            layerCount: 1,
            mipLevel: 0,
        },
        dstSubresource: {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            baseArrayLayer: 0,
            layerCount: 1,
            mipLevel: 0,
        },
    };

    vkCmdBlitImage(
        cmd,
        src,
        src_layout,
        dst,
        dst_layout,
        1,
        &blit,
        VK_FILTER_LINEAR
    );
}
|
|
|
|
// Binds `pipeline_handle`, the global descriptor set (set 0), and `sets`
// (sets 1..n) on either the graphics or compute command buffer.
// Dynamic offsets are arena allocations — assumed zero-filled (offset 0 per
// dynamic descriptor); TODO confirm AllocArray zeroes memory.
void
Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false)
{
    assert(pipeline_handle > 0, "Bind failure: pipeline is 0");

    VkCommandBuffer cmd = (compute ? vk.comp_cmd : vk.cmds[vk.frame_index]);
    PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_handle;
    BindPipeline(vk, cmd, pipeline);

    u32[] global_offsets;
    if (vk.global_set.dynamic_count > 0)
    {
        global_offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], vk.global_set.dynamic_count);
    }

    vkCmdBindDescriptorSets(
        cmd,
        pipeline.type,
        pipeline.layout,
        0,
        1,
        &vk.global_set.handle,
        cast(u32)global_offsets.length,
        global_offsets.ptr
    );

    u32 offset_count;
    VkDescriptorSet[] handles = AllocArray!(VkDescriptorSet)(&vk.frame_arenas[vk.frame_index], sets.length);

    foreach(i, set; sets)
    {
        handles[i] = set.handle;
        offset_count += set.dynamic_count;
    }

    // BUG FIX: use a separate array for these sets. Previously the global
    // set's offsets were reused when offset_count == 0, passing a stale
    // dynamicOffsetCount that does not match the sets being bound.
    u32[] set_offsets;
    if (offset_count > 0)
    {
        set_offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], offset_count);
    }

    vkCmdBindDescriptorSets(
        cmd,
        pipeline.type,
        pipeline.layout,
        1,
        cast(u32)handles.length,
        handles.ptr,
        cast(u32)set_offsets.length,
        set_offsets.ptr
    );
}
|
|
|
|
// Reserves and returns the next slot in the pipeline table, stamping its
// index. Callers treat handle 0 as invalid (see the `> 0` asserts elsewhere).
PipelineHandles*
NewPipeline(Vulkan* vk)
{
    // Guard against silently writing past the fixed-size handle table.
    assert(vk.pipeline_count < vk.pipeline_handles.length, "NewPipeline failure: pipeline_handles exhausted");

    PipelineHandles* pipeline = vk.pipeline_handles.ptr + vk.pipeline_count;
    pipeline.index = vk.pipeline_count;
    vk.pipeline_count += 1;

    return pipeline;
}
|
|
|
|
// Binds `pipeline_handle`, the global descriptor set (set 0), and `set`
// (set 1) on either the graphics or compute command buffer.
// Dynamic offsets are arena allocations — assumed zero-filled (offset 0 per
// dynamic descriptor); TODO confirm AllocArray zeroes memory.
void
Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set, bool compute = false)
{
    assert(pipeline_handle > 0, "Bind failure: pipeline is 0");

    VkCommandBuffer cmd = (compute ? vk.comp_cmd : vk.cmds[vk.frame_index]);
    PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_handle;
    BindPipeline(vk, cmd, pipeline);

    u32[] global_offsets;
    if (vk.global_set.dynamic_count > 0)
    {
        global_offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], vk.global_set.dynamic_count);
    }

    vkCmdBindDescriptorSets(
        cmd,
        pipeline.type,
        pipeline.layout,
        0,
        1,
        &vk.global_set.handle,
        cast(u32)global_offsets.length,
        global_offsets.ptr
    );

    // BUG FIX: use a separate array for this set. Previously the global set's
    // offsets were reused when set.dynamic_count == 0, passing a stale
    // dynamicOffsetCount that does not match `set`.
    u32[] set_offsets;
    if (set.dynamic_count > 0)
    {
        set_offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], set.dynamic_count);
    }

    vkCmdBindDescriptorSets(
        cmd,
        pipeline.type,
        pipeline.layout,
        1,
        1,
        &set.handle,
        cast(u32)set_offsets.length,
        set_offsets.ptr
    );
}
|
|
|
|
// Binds the pipeline on `cmd`, remembers it as this frame's last pipeline,
// and resets the dynamic viewport/scissor state to cover the full swapchain.
pragma(inline): void
BindPipeline(Vulkan* vk, VkCommandBuffer cmd, PipelineHandles* pipeline)
{
    vkCmdBindPipeline(cmd, pipeline.type, pipeline.handle);
    vk.last_pipeline[vk.frame_index] = pipeline.handle;

    VkViewport full_viewport;
    full_viewport.x = 0.0;
    full_viewport.y = 0.0;
    full_viewport.width = cast(f32)vk.swapchain_extent.width;
    full_viewport.height = cast(f32)vk.swapchain_extent.height;
    full_viewport.minDepth = 0.0;
    full_viewport.maxDepth = 1.0;

    vkCmdSetViewport(cmd, 0, 1, &full_viewport);

    // Scissor offset stays at the default (0, 0).
    VkRect2D full_scissor;
    full_scissor.extent.width = vk.swapchain_extent.width;
    full_scissor.extent.height = vk.swapchain_extent.height;

    vkCmdSetScissor(cmd, 0, 1, &full_scissor);
}
|
|
|
|
// Records a layout transition for `image` from `current_layout` to
// `new_layout`, covering all mip levels and array layers. The barrier is
// deliberately heavyweight (ALL_COMMANDS to ALL_COMMANDS, memory write to
// memory read+write) — simple rather than optimal. The aspect is inferred
// from the destination layout: depth for DEPTH_ATTACHMENT_OPTIMAL, color
// otherwise.
pragma(inline): void
Transition(VkCommandBuffer cmd, VkImage image, VkImageLayout current_layout, VkImageLayout new_layout)
{
    VkImageMemoryBarrier2 barrier = {
        sType: VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
        srcStageMask: VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
        srcAccessMask: VK_ACCESS_2_MEMORY_WRITE_BIT,
        dstStageMask: VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
        dstAccessMask: VK_ACCESS_2_MEMORY_WRITE_BIT | VK_ACCESS_2_MEMORY_READ_BIT,
        oldLayout: current_layout,
        newLayout: new_layout,
        image: image,
        subresourceRange: {
            aspectMask: new_layout == VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT,
            baseMipLevel: 0,
            levelCount: VK_REMAINING_MIP_LEVELS,
            baseArrayLayer: 0,
            layerCount: VK_REMAINING_ARRAY_LAYERS,
        },
    };

    VkDependencyInfo dep_info = {
        sType: VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
        imageMemoryBarrierCount: 1,
        pImageMemoryBarriers: &barrier,
    };

    vkCmdPipelineBarrier2(cmd, &dep_info);
}
|
|
|
|
// Transitions the view's image and updates its tracked layout.
pragma(inline): void
Transition(VkCommandBuffer cmd, ImageView* view, VkImageLayout new_layout)
{
    Transition(cmd, view.image, view.layout, new_layout);
    view.layout = new_layout;
}
|
|
|
|
// Transitions the image and updates its tracked layout.
pragma(inline): void
Transition(VkCommandBuffer cmd, Image* image, VkImageLayout new_layout)
{
    Transition(cmd, image.image, image.layout, new_layout);
    image.layout = new_layout;
}
|
|
|
|
// Creates a VkShaderModule from the SPIR-V blob `bytes`, storing the handle
// through `shader`. Returns false (after logging) on failure.
bool
BuildShader(Vulkan* vk, Shader* shader, u8[] bytes)
{
    // The Vulkan spec requires codeSize to be a multiple of 4 and pCode to be
    // 4-byte aligned; catch malformed/misaligned blobs up front instead of
    // tripping validation layers.
    assert(bytes.length % 4 == 0, "BuildShader failure: SPIR-V size is not a multiple of 4");
    assert((cast(size_t)bytes.ptr % 4) == 0, "BuildShader failure: SPIR-V data is not 4-byte aligned");

    VkShaderModuleCreateInfo shader_info = {
        sType: VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
        codeSize: bytes.length,
        pCode: cast(uint*)bytes.ptr,
    };

    VkResult result = vkCreateShaderModule(vk.device, &shader_info, null, shader);
    return VkCheck("vkCreateShaderModule failure", result);
}
|
|
|
|
// Creates the single render pass used for main rendering — one color
// attachment (the draw image) and one depth attachment — plus a by-region
// self-dependency on subpass 0 so fragment shaders can synchronize
// read-after-write within the pass. Ends by creating the framebuffer.
// Aborts via VkCheckA on failure.
void
InitFramebufferAndRenderPass(Vulkan* vk)
{
    VkAttachmentDescription[2] attach_descriptions = [
        // Attachment 0: color (draw image). Cleared on load, stored, and kept
        // in COLOR_ATTACHMENT_OPTIMAL on both ends — transitions happen
        // outside the pass.
        {
            format: vk.draw_image.format,
            samples: VK_SAMPLE_COUNT_1_BIT,
            loadOp: VK_ATTACHMENT_LOAD_OP_CLEAR,
            storeOp: VK_ATTACHMENT_STORE_OP_STORE,
            stencilLoadOp: VK_ATTACHMENT_LOAD_OP_DONT_CARE,
            stencilStoreOp: VK_ATTACHMENT_STORE_OP_DONT_CARE,
            initialLayout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            finalLayout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        },
        // Attachment 1: depth. Cleared on load, stored, layout likewise fixed.
        {
            format: vk.depth_image.format,
            samples: VK_SAMPLE_COUNT_1_BIT,
            loadOp: VK_ATTACHMENT_LOAD_OP_CLEAR,
            storeOp: VK_ATTACHMENT_STORE_OP_STORE,
            initialLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
            finalLayout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
        },
    ];

    VkAttachmentReference color_ref = {
        attachment: 0,
        layout: VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };

    VkAttachmentReference depth_ref = {
        attachment: 1,
        layout: VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
    };

    VkSubpassDescription subpass = {
        pipelineBindPoint: VK_PIPELINE_BIND_POINT_GRAPHICS,
        colorAttachmentCount: 1,
        pColorAttachments: &color_ref,
        pDepthStencilAttachment: &depth_ref,
    };

    // Subpass 0 -> subpass 0: lets ImageBarrier()-style fragment-to-fragment
    // synchronization work inside the pass (by region).
    VkSubpassDependency self_dependency = {
        srcSubpass: 0,
        dstSubpass: 0,
        srcStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
        dstStageMask: VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
        srcAccessMask: VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
        dstAccessMask: VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
        dependencyFlags: VK_DEPENDENCY_BY_REGION_BIT,
    };

    VkRenderPassCreateInfo pass_info = {
        sType: VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
        attachmentCount: cast(u32)attach_descriptions.length,
        pAttachments: attach_descriptions.ptr,
        subpassCount: 1,
        pSubpasses: &subpass,
        dependencyCount: 1,
        pDependencies: &self_dependency,
    };

    VkResult result = vkCreateRenderPass(vk.device, &pass_info, null, &vk.render_pass);
    VkCheckA("vkCreateRenderPass failure", result);

    CreateFramebuffer(vk);
}
|
|
|
|
// Creates the framebuffer binding the draw (color) and depth image views, in
// the attachment order declared by the render pass. Aborts on failure.
void
CreateFramebuffer(Vulkan* vk)
{
    // FIX: keep the attachment list in a named stack array so pAttachments
    // points at well-defined storage instead of the address of a temporary
    // array literal.
    VkImageView[2] attachments = [vk.draw_image.view, vk.depth_image.view];

    VkFramebufferCreateInfo framebuffer_info = {
        sType: VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
        renderPass: vk.render_pass,
        attachmentCount: 2,
        pAttachments: attachments.ptr,
        width: vk.swapchain_extent.width,
        height: vk.swapchain_extent.height,
        layers: 1,
    };

    VkResult result = vkCreateFramebuffer(vk.device, &framebuffer_info, null, &vk.framebuffer);
    VkCheckA("vkCreateFramebuffer failure", result);
}
|
|
|
|
// Builds a graphics pipeline from `build_info` (shaders, vertex layout,
// specialization constants, layout) and writes its handle index into
// `*pipeline_handle`. Returns false if shader-module or pipeline creation
// fails. Note: stage order in shader_info is [fragment, vertex].
bool
CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* build_info)
{
    PipelineHandles* pipeline = NewPipeline(vk);
    pipeline.type = VK_PIPELINE_BIND_POINT_GRAPHICS;
    pipeline.layout = build_info.layout;

    // Viewport/scissor are dynamic; BindPipeline() sets them every bind.
    VkDynamicState[2] dyn_state = [ VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR ];

    VkPipelineDynamicStateCreateInfo dyn_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
        pDynamicStates: dyn_state.ptr,
        dynamicStateCount: cast(u32)dyn_state.length,
    };

    VkPipelineInputAssemblyStateCreateInfo assembly_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
        topology: VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
        primitiveRestartEnable: VK_FALSE,
    };

    VkPipelineRasterizationStateCreateInfo rasterization_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
        cullMode: VK_CULL_MODE_BACK_BIT,
        polygonMode: VK_POLYGON_MODE_FILL,
        lineWidth: 1.0,
        frontFace: VK_FRONT_FACE_COUNTER_CLOCKWISE,
    };

    VkPipelineMultisampleStateCreateInfo multisample_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
        rasterizationSamples: VK_SAMPLE_COUNT_1_BIT,
        minSampleShading: 1.0,
        alphaToCoverageEnable: VK_FALSE,
        alphaToOneEnable: VK_FALSE,
    };

    // Reverse-Z style comparison: GREATER_OR_EQUAL passes.
    VkPipelineDepthStencilStateCreateInfo depth_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
        depthTestEnable: VK_TRUE,
        depthWriteEnable: VK_TRUE,
        depthCompareOp: VK_COMPARE_OP_GREATER_OR_EQUAL,
        depthBoundsTestEnable: VK_FALSE,
        stencilTestEnable: VK_FALSE,
        minDepthBounds: 0.0,
        maxDepthBounds: 1.0,
    };

    VkPipelineRenderingCreateInfo rendering_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
        colorAttachmentCount: 1,
        pColorAttachmentFormats: cast(VkFormat*)&vk.draw_image.format,
        depthAttachmentFormat: vk.depth_image.format,
    };

    // Standard premultiplied-free alpha blending: src*a + dst*(1-a).
    VkPipelineColorBlendAttachmentState blend_state = {
        colorWriteMask: VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,
        blendEnable: VK_TRUE,
        srcColorBlendFactor: VK_BLEND_FACTOR_SRC_ALPHA,
        dstColorBlendFactor: VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        colorBlendOp: VK_BLEND_OP_ADD,
        srcAlphaBlendFactor: VK_BLEND_FACTOR_SRC_ALPHA,
        dstAlphaBlendFactor: VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
        alphaBlendOp: VK_BLEND_OP_ADD,
    };

    VkPipelineColorBlendStateCreateInfo blend_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
        logicOpEnable: VK_FALSE,
        logicOp: VK_LOGIC_OP_COPY,
        attachmentCount: 1,
        pAttachments: &blend_state,
    };

    VkVertexInputBindingDescription vertex_input_desc = {
        binding: 0,
        inputRate: cast(VkVertexInputRate)build_info.input_rate,
        stride: build_info.input_rate_stride,
    };

    VkPipelineVertexInputStateCreateInfo input_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
        vertexBindingDescriptionCount: 1,
        pVertexBindingDescriptions: &vertex_input_desc,
        vertexAttributeDescriptionCount: cast(u32)build_info.vertex_attributes.length,
        pVertexAttributeDescriptions: build_info.vertex_attributes.ptr,
    };

    VkPipelineViewportStateCreateInfo viewport_info = {
        sType: VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
        viewportCount: 1,
        scissorCount: 1,
    };

    // Index 0 is the FRAGMENT stage, index 1 the VERTEX stage.
    VkPipelineShaderStageCreateInfo[2] shader_info = [
        {
            sType: VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            stage: VK_SHADER_STAGE_FRAGMENT_BIT,
            pName: "main",
        },
        {
            sType: VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            stage: VK_SHADER_STAGE_VERTEX_BIT,
            pName: "main",
        },
    ];

    bool success = true;
    Shader frag_module, vert_module;

    success &= BuildShader(vk, &frag_module, build_info.frag_shader);
    success &= BuildShader(vk, &vert_module, build_info.vertex_shader);

    // Modules can be destroyed once the pipeline is created; destroying a
    // null handle (failed build) is a no-op in Vulkan.
    scope(exit)
    {
        Destroy(vk, frag_module);
        Destroy(vk, vert_module);
    }

    if (success)
    {
        // `module` is a D keyword, hence the __traits(getMember) assignment.
        __traits(getMember, shader_info.ptr + 0, "module") = frag_module;
        __traits(getMember, shader_info.ptr + 1, "module") = vert_module;

        VkSpecializationInfo vert_spec_info = {
            dataSize: build_info.vert_spec.size,
            mapEntryCount: cast(u32)build_info.vert_spec.entries.length,
            pMapEntries: build_info.vert_spec.entries.ptr,
            pData: build_info.vert_spec.data,
        };

        // BUG FIX: the specialization infos were attached to the wrong
        // stages — vertex spec data went to shader_info[0] (fragment) and
        // fragment spec data to shader_info[1] (vertex). Vertex spec belongs
        // on index 1, fragment spec on index 0.
        if (build_info.vert_spec.entries.length > 0)
        {
            shader_info[1].pSpecializationInfo = &vert_spec_info;
        }

        VkSpecializationInfo frag_spec_info = {
            dataSize: build_info.frag_spec.size,
            mapEntryCount: cast(u32)build_info.frag_spec.entries.length,
            pMapEntries: build_info.frag_spec.entries.ptr,
            pData: build_info.frag_spec.data,
        };

        if (build_info.frag_spec.entries.length > 0)
        {
            shader_info[0].pSpecializationInfo = &frag_spec_info;
        }

        VkGraphicsPipelineCreateInfo create_info = {
            sType: VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
            pNext: &rendering_info,
            pVertexInputState: &input_info,
            pInputAssemblyState: &assembly_info,
            pViewportState: &viewport_info,
            pRasterizationState: &rasterization_info,
            pMultisampleState: &multisample_info,
            pColorBlendState: &blend_info,
            pDepthStencilState: &depth_info,
            pDynamicState: &dyn_info,
            stageCount: cast(u32)shader_info.length,
            pStages: shader_info.ptr,
            renderPass: vk.render_pass,
            layout: build_info.layout,
        };

        VkResult result = vkCreateGraphicsPipelines(vk.device, null, 1, &create_info, null, &pipeline.handle);
        success = VkCheck("CreateGraphicsPipeline failure", result);

        *pipeline_handle = pipeline.index;
    }

    return success;
}
|
|
|
|
// Builds a compute pipeline from `comp_info` (shader blob, optional
// specialization constants, layout) and returns its handle index.
// Asserts on shader-build or pipeline-creation failure.
Pipeline
CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info)
{
    VkComputePipelineCreateInfo info = {
        sType: VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
        layout: comp_info.layout,
        stage: {
            sType: VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
            stage: VK_SHADER_STAGE_COMPUTE_BIT,
            pName: "main",
        },
    };

    Shader comp_module;
    assert(BuildShader(vk, &comp_module, comp_info.shader), "Unable to build compute shader");
    // The module can be destroyed once the pipeline has been created.
    scope(exit) Destroy(vk, comp_module);

    // `module` is a D keyword, hence the __traits(getMember) assignment.
    // NOTE(review): this uses comp_module.value whereas the graphics path
    // assigns the Shader directly — looks inconsistent; confirm which form
    // the VkShaderModule binding expects.
    __traits(getMember, &info.stage, "module") = comp_module.value;

    VkSpecializationInfo spec_info = {
        dataSize: comp_info.spec.size,
        mapEntryCount: cast(u32)comp_info.spec.entries.length,
        pMapEntries: comp_info.spec.entries.ptr,
        pData: comp_info.spec.data,
    };

    if (comp_info.spec.entries.length > 0)
    {
        info.stage.pSpecializationInfo = &spec_info;
    }

    PipelineHandles* pipeline = NewPipeline(vk);
    pipeline.type = VK_PIPELINE_BIND_POINT_COMPUTE;
    pipeline.layout = comp_info.layout;

    Pipeline pipeline_handle = pipeline.index;

    VkResult result = vkCreateComputePipelines(vk.device, null, 1, &info, null, &pipeline.handle);
    assert(VkCheck("CreateComputePipeline failure", result), "Unable to build pipeline");

    return pipeline_handle;
}
|
|
|
|
// Records a clear of `view`'s image to `color` using its current layout
// (must be GENERAL or TRANSFER_DST per the spec).
// NOTE(review): vkCmdClearColorImage is only valid for color-aspect images;
// depth images need vkCmdClearDepthStencilImage — confirm no caller passes a
// depth view here.
void
ClearColor(Vulkan* vk, ImageView* view, f32[4] color)
{
    VkImageSubresourceRange clear_range = {
        aspectMask: (view.depth_image ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT),
        levelCount: 1,
        layerCount: 1,
    };

    // FIX: take the static array's data pointer explicitly (`.ptr`) rather
    // than casting the array itself to a pointer type.
    vkCmdClearColorImage(
        vk.cmds[vk.frame_index],
        view.image,
        view.layout,
        cast(VkClearColorValue*)color.ptr,
        1,
        &clear_range
    );
}
|
|
|
|
// Blocks until the device has finished all queued work.
void
WaitIdle(Vulkan* vk)
{
    vkDeviceWaitIdle(vk.device);
}
|
|
|
|
// Destroys a shader module; destroying a null handle is a valid no-op.
void
Destroy(Vulkan* vk, Shader shader)
{
    vkDestroyShaderModule(vk.device, shader, null);
}
|
|
|
|
// Destroys the VkPipeline behind handle `pipeline`. The PipelineHandles
// table slot itself is not reclaimed.
void
Destroy(Vulkan* vk, Pipeline pipeline)
{
    // Handle 0 is invalid everywhere else (Bind/PushConstants assert on it);
    // be consistent here instead of indexing slot 0.
    assert(pipeline > 0, "Destroy failure: pipeline is 0");
    vkDestroyPipeline(vk.device, vk.pipeline_handles[pipeline].handle, null);
}
|
|
|
|
// Tears down every subsystem recorded on the cleanup list after draining the
// device. Each list node names one subsystem; only subsystems that were
// actually pushed during init get destroyed, so partial initialization is
// handled. NOTE(review): traversal is first-to-last of a PushFront list —
// presumably reverse creation order; confirm against the Push() helper.
void
Destroy(Vulkan* vk)
{
    vkDeviceWaitIdle(vk.device);

    alias N = Node!(SI);
    assert(vk.cleanup_list.first != null, "node null");
    for(N* node = vk.cleanup_list.first; node != null; node = node.next)
    {
        switch (node.value)
        {
            case SI.Renderer:
                DestroyRenderer(vk);
                break;
            case SI.Instance:
                Destroy(vk.instance);
                break;
            case SI.Debug:
                Destroy(vk.dbg_msg, vk.instance);
                break;
            case SI.Surface:
                Destroy(vk.surface, vk.instance);
                break;
            case SI.Device:
                Destroy(vk.device);
                break;
            case SI.Vma:
                Destroy(vk.vma);
                break;
            case SI.FrameStructures:
                DestroyFS(vk);
                break;
            case SI.Swapchain:
                Destroy(vk.swapchain, vk.present_images, vk.device);
                break;
            case SI.DrawImages:
                // Both render-target images are owned by this entry.
                Destroy(vk, &vk.draw_image);
                Destroy(vk, &vk.depth_image);
                break;
            case SI.DescriptorPools:
                DestroyDescriptorPools(vk);
                break;
            case SI.Buffers:
                Destroy(vk, &vk.transfer_buf);
                break;
            case SI.Pipelines:
                DestroyPipelines(vk);
                break;
            default:
                break;
        }
    }
}
|
|
|
|
// Destroys the currently-active descriptor pool and every retired pool on
// the full_pools list.
void
DestroyDescriptorPools(Vulkan* vk)
{
    vkDestroyDescriptorPool(vk.device, vk.active_pool, null);

    for (Node!(VkDescriptorPool)* cursor = vk.full_pools.first; cursor != null; cursor = cursor.next)
    {
        vkDestroyDescriptorPool(vk.device, cursor.value, null);
    }
}
|
|
|
|
// Tears down the format-conversion pipeline objects. A zero handle means the
// object was never created and is skipped.
void
DestroyPipelines(Vulkan* vk)
{
    if (vk.conv_pipeline_layout)
    {
        vkDestroyPipelineLayout(vk.device, vk.conv_pipeline_layout, null);
    }

    if (vk.conv_desc_layout)
    {
        vkDestroyDescriptorSetLayout(vk.device, vk.conv_desc_layout, null);
    }

    // The three R/RG/RGB -> RGBA conversion pipelines share identical cleanup.
    Pipeline[3] conv_pipelines = [
        vk.r_to_rgba_pipeline,
        vk.rg_to_rgba_pipeline,
        vk.rgb_to_rgba_pipeline,
    ];

    foreach(handle; conv_pipelines)
    {
        if (handle)
        {
            vkDestroyPipeline(vk.device, vk.pipeline_handles[handle].handle, null);
        }
    }
}
|
|
|
|
// Unmaps a persistently-mapped buffer, then destroys the underlying buffer.
void
Destroy(T)(Vulkan* vk, MappedBuffer!(T)* buf)
{
    vmaUnmapMemory(vk.vma, buf.alloc);
    Destroy(vk, &buf.base);
}
|
|
|
|
// Destroys a buffer and releases its VMA allocation.
void
Destroy(Vulkan* vk, Buffer* buf)
{
    vmaDestroyBuffer(vk.vma, buf.buffer, buf.alloc);
}
|
|
|
|
// Registers descriptor pools for cleanup and creates the first active pool.
void
InitDescriptors(Vulkan* vk)
{
    Push(vk, SI.DescriptorPools);

    PushDescriptorPool(vk);
}
|
|
|
|
// Creates a fresh descriptor pool and makes it the active pool. The previous
// active pool (if any) is retired onto full_pools so DestroyDescriptorPools
// can free it at shutdown. Asserts on creation failure.
void
PushDescriptorPool(Vulkan* vk)
{
    VkDescriptorPoolSize[7] pool_sizes = [
        { type: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 4096 },
        { type: VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, descriptorCount: 4096 },
    ];

    VkDescriptorPoolCreateInfo pool_info = {
        sType: VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
        poolSizeCount: cast(u32)pool_sizes.length,
        pPoolSizes: pool_sizes.ptr,
        maxSets: MAX_SETS,
    };

    VkDescriptorPool pool;
    VkResult result = vkCreateDescriptorPool(vk.device, &pool_info, null, &pool);
    bool success = VkCheck("vkCreateDescriptorPool failure", result);
    assert(success, "vkCreateDescriptorPool error");

    // BUG FIX: the condition was inverted — it pushed only null pools onto
    // full_pools and leaked every real exhausted pool (which then never got
    // destroyed). Retire the old pool only when it actually exists.
    if (vk.active_pool != null && vk.active_pool != VK_NULL_HANDLE)
    {
        Node!(VkDescriptorPool)* node = Alloc!(Node!(VkDescriptorPool));
        node.value = vk.active_pool;
        PushFront(&vk.full_pools, node, null);
    }

    vk.active_pool = pool;
}
|
|
|
|
// Creates the shared "global" descriptor set (set 0 in every Bind()):
// binding 0 = the draw image as a storage image, binding 1 = a nearest-
// filter sampler with max anisotropy. Returns false if sampler creation
// fails; layout/set allocation failures assert inside their helpers.
bool
InitGlobalDescSet(Vulkan* vk)
{
    // Queried only for limits.maxSamplerAnisotropy below.
    VkPhysicalDeviceProperties props;
    vkGetPhysicalDeviceProperties(vk.physical_device, &props);

    VkSamplerCreateInfo sampler_info = {
        sType: VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        magFilter: VK_FILTER_NEAREST,
        minFilter: VK_FILTER_NEAREST,
        addressModeU: VK_SAMPLER_ADDRESS_MODE_REPEAT,
        addressModeV: VK_SAMPLER_ADDRESS_MODE_REPEAT,
        addressModeW: VK_SAMPLER_ADDRESS_MODE_REPEAT,
        anisotropyEnable: VK_TRUE,
        maxAnisotropy: props.limits.maxSamplerAnisotropy,
        borderColor: VK_BORDER_COLOR_INT_OPAQUE_BLACK,
        compareOp: VK_COMPARE_OP_ALWAYS,
        mipmapMode: VK_SAMPLER_MIPMAP_MODE_LINEAR,
    };

    VkResult result = vkCreateSampler(vk.device, &sampler_info, null, &vk.nearest_sampler);
    bool success = VkCheck("vkCreateSampler failure", result);

    if (success)
    {
        // Both bindings are visible to all shader stages.
        DescLayoutBinding[2] layout_bindings = [
            { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
            { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
        ];

        vk.global_set_layout = CreateDescSetLayout(vk, layout_bindings);
        vk.global_set = AllocDescSet(vk, vk.global_set_layout);

        // Binding 0: point at the draw image.
        WriteDrawImageDesc(vk);

        // Binding 1: the nearest sampler.
        VkDescriptorImageInfo sampler_desc_info = {
            sampler: vk.nearest_sampler,
        };

        VkWriteDescriptorSet write = {
            sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            dstSet: vk.global_set.handle,
            dstBinding: 1,
            descriptorCount: 1,
            descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER,
            pImageInfo: &sampler_desc_info,
        };

        vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
    }

    return success;
}
|
|
|
|
// Writes the GUI descriptor set: binding 0 = the glyph/sprite atlas as a
// sampled image (in its currently-tracked layout), binding 1 = `buf` as a
// uniform buffer covering its full size.
// NOTE(review): UNIFORM_BUFFER is not among the pool sizes in
// PushDescriptorPool (only the dynamic variant) — confirm the pool actually
// serves this type.
void
WriteGUI(Vulkan* vk, DescSet set, ImageView* atlas, Buffer* buf)
{
    VkDescriptorImageInfo image_info = {
        imageView: atlas.view,
        imageLayout: atlas.layout,
    };

    VkDescriptorBufferInfo buf_info = {
        buffer: buf.buffer,
        range: buf.size,
        offset: 0,
    };

    VkWriteDescriptorSet[2] writes = [
        {
            sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            dstSet: set.handle,
            dstBinding: 0,
            descriptorCount: 1,
            descriptorType: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
            pImageInfo: &image_info,
        },
        {
            sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            dstSet: set.handle,
            dstBinding: 1,
            descriptorCount: 1,
            descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
            pBufferInfo: &buf_info,
        }
    ];

    vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null);
}
|
|
|
|
// Points binding 0 of the global descriptor set at the draw image as a
// storage image in GENERAL layout.
void
WriteDrawImageDesc(Vulkan* vk)
{
    VkDescriptorImageInfo desc_info;
    desc_info.imageView = vk.draw_image.view;
    desc_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;

    VkWriteDescriptorSet draw_write;
    draw_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    draw_write.dstSet = vk.global_set.handle;
    draw_write.dstBinding = 0;
    draw_write.descriptorCount = 1;
    draw_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
    draw_write.pImageInfo = &desc_info;

    vkUpdateDescriptorSets(vk.device, 1, &draw_write, 0, null);
}
|
|
|
|
// Writes `buf` (full range) into binding 1 of the format-conversion
// descriptor set as a storage buffer.
void
WriteConvDescriptor(Vulkan* vk, Buffer* buf)
{
    VkDescriptorBufferInfo buf_info = {
        buffer: buf.buffer,
        range: buf.size,
        offset: 0,
    };

    VkWriteDescriptorSet write = {
        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        dstSet: vk.conv_desc_set.handle,
        dstBinding: 1,
        descriptorCount: 1,
        descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
        pBufferInfo: &buf_info,
    };

    vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
}
|
|
|
|
// Writes `view` into binding 0 of the format-conversion descriptor set as a
// storage image; the descriptor assumes GENERAL layout, so the image must be
// in (or transitioned to) GENERAL when the conversion dispatch runs.
void
WriteConvDescriptor(Vulkan* vk, ImageView* view)
{
    VkDescriptorImageInfo image_info = {
        imageView: view.view,
        imageLayout: VK_IMAGE_LAYOUT_GENERAL,
    };

    VkWriteDescriptorSet write = {
        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
        dstSet: vk.conv_desc_set.handle,
        dstBinding: 0,
        descriptorCount: 1,
        descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
        pImageInfo: &image_info,
    };

    vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
}
|
|
|
|
// Dispatches enough 16x16 workgroups to cover the swapchain extent
// (one invocation per pixel, rounded up).
pragma(inline): void
Dispatch(Vulkan* vk, VkCommandBuffer cmd)
{
    // Integer ceiling division replaces the old f32 Ceil() round-trip, which
    // costs two float conversions and loses precision for very large extents.
    u32 group_w = (vk.swapchain_extent.width + 15) / 16;
    u32 group_h = (vk.swapchain_extent.height + 15) / 16;
    vkCmdDispatch(cmd, group_w, group_h, 1);
}
|
|
|
|
// Dispatches on this frame's graphics command buffer.
pragma(inline): void
Dispatch(Vulkan* vk)
{
    Dispatch(vk, vk.cmds[vk.frame_index]);
}
|
|
|
|
// Reports whether `result` is VK_SUCCESS, logging "<message>: <result name>"
// for any other result. Out-of-memory results currently halt the program.
// TODO: Handle error cases that can be handled
bool
VkCheck(string message, VkResult result)
{
    switch (result)
    {
        case VK_ERROR_OUT_OF_DEVICE_MEMORY:
            assert(false, "Handle VK_ERROR_OUT_OF_DEVICE_MEMORY");
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            assert(false, "Handle VK_ERROR_OUT_OF_HOST_MEMORY");
        case VK_SUCCESS:
            return true;
        default:
        {
            char[512] buf;
            buf[] = '\0';
            buf.sformat("%s: %s", message, VkResultStr(result));
            Logf("%r", buf);
            return false;
        }
    }
}
|
|
|
|
// Like VkCheck, but aborts (asserts) instead of returning false on failure.
void
VkCheckA(string message, VkResult result)
{
    assert(VkCheck(message, result), "Aborting program due to failure");
}
|
|
|
|
/// Creates all per-frame synchronization and command-recording objects:
///  - one submit semaphore per swapchain image (so CreateSwapchain must run
///    first — vk.present_images.length is read here),
///  - per-overlapped-frame command pool + buffer, render fence, and acquire
///    semaphore (FRAME_OVERLAP of each),
///  - an immediate-submit pool/buffer/fence on the transfer queue family,
///  - a compute pool/buffer/fence on the graphics queue family.
/// Each step only runs if everything before it succeeded; returns overall
/// success.
///
/// Fixed: the vkAllocateCommandBuffers failure for the compute buffer was
/// previously logged as "vkCreateCommandPool failure".
bool
InitFrameStructures(Vulkan* vk)
{
    Push(vk, SI.FrameStructures);

    bool success = true;
    Arena* arena = &vk.frame_arenas[0];

    VkSemaphoreCreateInfo sem_info = { sType: VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO };

    // Pools allow per-buffer reset; fences start signaled so the first
    // frame's wait does not block forever.
    VkCommandPoolCreateInfo pool_info = {
        sType: VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        flags: VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
    };

    VkFenceCreateInfo fence_info = {
        sType: VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
        flags: VK_FENCE_CREATE_SIGNALED_BIT,
    };

    // Reused for every allocation below; only commandPool changes.
    VkCommandBufferAllocateInfo cmd_info = {
        sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        commandBufferCount: 1,
        level: VK_COMMAND_BUFFER_LEVEL_PRIMARY,
    };

    // One submit semaphore per swapchain image (not per overlapped frame).
    u32 sem_count = cast(u32)vk.present_images.length;
    vk.submit_sems = AllocArray!(VkSemaphore)(arena, sem_count);

    foreach(i; 0 .. sem_count)
    {
        if (success)
        {
            VkResult result = vkCreateSemaphore(vk.device, &sem_info, null, vk.submit_sems.ptr + i);
            success = VkCheck("vkCreateSemaphore failure", result);
        }
    }

    foreach(i; 0 .. FRAME_OVERLAP)
    {
        VkResult result;

        if (success)
        {
            pool_info.queueFamilyIndex = vk.gfx_index;
            result = vkCreateCommandPool(vk.device, &pool_info, null, vk.cmd_pools.ptr + i);
            success = VkCheck("vkCreateCommandPool failure", result);
        }

        if (success)
        {
            cmd_info.commandPool = vk.cmd_pools[i];
            result = vkAllocateCommandBuffers(vk.device, &cmd_info, vk.cmds.ptr + i);
            success = VkCheck("vkAllocateCommandBuffers failure", result);
        }

        if (success)
        {
            result = vkCreateFence(vk.device, &fence_info, null, vk.render_fences.ptr + i);
            success = VkCheck("vkCreateFence failure", result);
        }

        if (success)
        {
            result = vkCreateSemaphore(vk.device, &sem_info, null, vk.acquire_sems.ptr + i);
            success = VkCheck("vkCreateSemaphore failure", result);
        }
    }

    // Immediate-submit objects live on the transfer queue family.
    if (success)
    {
        pool_info.queueFamilyIndex = vk.tfer_index;
        VkResult result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.imm_pool);
        success = VkCheck("vkCreateCommandPool failure", result);
    }

    if (success)
    {
        cmd_info.commandPool = vk.imm_pool;
        VkResult result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.imm_cmd);
        success = VkCheck("vkAllocateCommandBuffers failure", result);
    }

    if (success)
    {
        VkResult result = vkCreateFence(vk.device, &fence_info, null, &vk.imm_fence);
        success = VkCheck("vkCreateFence failure", result);
    }

    // Compute objects go back on the graphics queue family.
    if (success)
    {
        pool_info.queueFamilyIndex = vk.gfx_index;
        VkResult result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.comp_cmd_pool);
        success = VkCheck("vkCreateCommandPool failure", result);
    }

    if (success)
    {
        cmd_info.commandPool = vk.comp_cmd_pool;
        VkResult result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.comp_cmd);
        success = VkCheck("vkAllocateCommandBuffers failure", result);
    }

    if (success)
    {
        VkResult result = vkCreateFence(vk.device, &fence_info, null, &vk.comp_fence);
        success = VkCheck("vkCreateFence failure", result);
    }

    return success;
}
|
|
|
|
/// Returns the first entry of VK_IMAGE_FORMATS that the physical device
/// supports as an optimal-tiling 2D image with the draw-image usage flags.
/// When no candidate is supported, the zero-initialized VkFormat
/// (VK_FORMAT_UNDEFINED) falls through to the caller.
Format
GetDrawImageFormat(Vulkan* vk)
{
    VkFormat chosen;

    foreach(candidate; VK_IMAGE_FORMATS)
    {
        VkImageFormatProperties props;
        VkResult query = vkGetPhysicalDeviceImageFormatProperties(
            vk.physical_device,
            candidate,
            VK_IMAGE_TYPE_2D,
            VK_IMAGE_TILING_OPTIMAL,
            IU.Draw,
            0,
            &props
        );

        // Any non-success (VK_ERROR_FORMAT_NOT_SUPPORTED or otherwise) just
        // moves on to the next candidate; only a clean success selects.
        if (query == VK_SUCCESS)
        {
            chosen = candidate;
            break;
        }
    }

    return cast(Format)chosen;
}
|
|
|
|
/// Creates the off-screen color ("draw") image and the depth image at the
/// current swapchain resolution. The color format is negotiated against
/// device support via GetDrawImageFormat; depth is hard-wired to D32_SFLOAT.
/// NOTE(review): the CreateImageView return values are ignored, so `success`
/// is unconditionally true here — confirm failures are surfaced elsewhere.
bool
CreateDrawImages(Vulkan* vk)
{
    Push(vk, SI.DrawImages);

    bool success = true;

    Format draw_format = cast(Format)GetDrawImageFormat(vk);
    Format depth_format = cast(Format)VK_FORMAT_D32_SFLOAT;

    // Sized to the swapchain, so CreateSwapchain must have run first.
    u32 w = vk.swapchain_extent.width;
    u32 h = vk.swapchain_extent.height;

    CreateImageView(vk, &vk.draw_image, w, h, draw_format, IU.Draw, false);
    CreateImageView(vk, &vk.depth_image, w, h, depth_format, IU.Depth, true);

    return success;
}
|
|
|
|
/// Queries the surface's supported formats and present modes, then caches
/// the renderer's choices: the first reported surface format, and MAILBOX
/// presentation when available (falling back to FIFO, which the spec
/// guarantees is always supported).
void
SelectSwapchainFormats(Vulkan* vk)
{
    Arena* scratch = &vk.frame_arenas[0];

    u32 n_formats;
    vkGetPhysicalDeviceSurfaceFormatsKHR(vk.physical_device, vk.surface, &n_formats, null);
    VkSurfaceFormatKHR[] surface_formats = AllocArray!(VkSurfaceFormatKHR)(scratch, n_formats);
    vkGetPhysicalDeviceSurfaceFormatsKHR(vk.physical_device, vk.surface, &n_formats, surface_formats.ptr);

    u32 n_modes;
    vkGetPhysicalDeviceSurfacePresentModesKHR(vk.physical_device, vk.surface, &n_modes, null);
    VkPresentModeKHR[] present_modes = AllocArray!(VkPresentModeKHR)(scratch, n_modes);
    vkGetPhysicalDeviceSurfacePresentModesKHR(vk.physical_device, vk.surface, &n_modes, present_modes.ptr);

    VkPresentModeKHR selected = VK_PRESENT_MODE_FIFO_KHR;
    foreach(candidate; present_modes)
    {
        if (candidate == VK_PRESENT_MODE_MAILBOX_KHR)
        {
            selected = candidate;
            break;
        }
    }

    vk.surface_format = surface_formats[0];
    vk.present_mode = selected;
}
|
|
|
|
/// (Re)creates the swapchain and one ImageView wrapper per swapchain image.
/// On the first call the surface format and present mode are selected and
/// cached; the image extent is clamped to the surface capabilities.
/// Returns false (without touching vk.present_images) if swapchain creation
/// fails.
///
/// Fixes:
///  - the requested image count (minImageCount + 1) is now clamped to
///    cap.maxImageCount (0 means "no limit"), as required by the spec;
///  - on vkCreateSwapchainKHR failure we bail out instead of querying
///    images from an invalid swapchain handle;
///  - removed the unused `views` array and `framebuffer_info` locals.
bool
CreateSwapchain(Vulkan* vk)
{
    Push(vk, SI.Swapchain);

    bool success = true;
    Arena* arena = &vk.frame_arenas[0];

    VkSurfaceCapabilitiesKHR cap;
    vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vk.physical_device, vk.surface, &cap);

    // Format/present-mode selection happens once; this function also runs on
    // resize via RecreateSwapchain.
    static bool initialized = false;
    if (!initialized)
    {
        SelectSwapchainFormats(vk);
    }

    // Ask for one image above the minimum to reduce driver stalls, but never
    // exceed the surface maximum (maxImageCount == 0 means unbounded).
    u32 image_count = cap.minImageCount + 1;
    if (cap.maxImageCount > 0 && image_count > cap.maxImageCount)
    {
        image_count = cap.maxImageCount;
    }

    VkSwapchainCreateInfoKHR info = {
        sType: VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
        imageArrayLayers: 1,
        imageUsage: IU.Swapchain,
        compositeAlpha: VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
        clipped: VK_TRUE,
        imageSharingMode: VK_SHARING_MODE_EXCLUSIVE,
        minImageCount: image_count,
        surface: vk.surface,
        imageFormat: vk.surface_format.format,
        imageColorSpace: vk.surface_format.colorSpace,
        imageExtent: {
            width: clamp(cast(u32)vk.window_w, cap.minImageExtent.width, cap.maxImageExtent.width),
            height: clamp(cast(u32)vk.window_h, cap.minImageExtent.height, cap.maxImageExtent.height),
        },
        preTransform: cap.currentTransform,
        presentMode: vk.present_mode,
    };

    VkResult result = vkCreateSwapchainKHR(vk.device, &info, null, &vk.swapchain);
    success = VkCheck("vkCreateSwapchainKHR failure", result);

    // Everything below dereferences the new swapchain; stop here on failure.
    if (!success)
    {
        return false;
    }

    u32 count;
    vkGetSwapchainImagesKHR(vk.device, vk.swapchain, &count, null);
    VkImage[] images = AllocArray!(VkImage)(arena, count);
    vkGetSwapchainImagesKHR(vk.device, vk.swapchain, &count, images.ptr);

    // Wrappers live in the long-lived arena; they survive frame resets.
    vk.present_images = AllocArray!(ImageView)(&vk.arena, count);

    VkImageViewCreateInfo view_info = {
        sType: VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        viewType: VK_IMAGE_VIEW_TYPE_2D,
        components: {
            r: VK_COMPONENT_SWIZZLE_IDENTITY,
            g: VK_COMPONENT_SWIZZLE_IDENTITY,
            b: VK_COMPONENT_SWIZZLE_IDENTITY,
            a: VK_COMPONENT_SWIZZLE_IDENTITY,
        },
        subresourceRange: {
            aspectMask: VK_IMAGE_ASPECT_COLOR_BIT,
            baseMipLevel: 0,
            levelCount: 1,
            baseArrayLayer: 0,
            layerCount: 1,
        },
    };

    foreach(i; 0 .. count)
    {
        vk.present_images[i].image = images[i];
        vk.present_images[i].format = cast(Format)vk.surface_format.format;
        view_info.image = images[i];
        view_info.format = vk.surface_format.format;

        result = vkCreateImageView(vk.device, &view_info, null, &vk.present_images[i].view);
        success = VkCheck("vkCreateImageView failure", result);
    }

    vk.swapchain_extent.width = info.imageExtent.width;
    vk.swapchain_extent.height = info.imageExtent.height;
    vk.swapchain_extent.depth = 1;

    if (!initialized && success)
    {
        initialized = true;
    }

    return success;
}
|
|
|
|
/// Tears down and rebuilds everything tied to the window size: the
/// swapchain and its per-image views, the draw/depth images, and the
/// framebuffer. Waits for the device to go idle first so no in-flight work
/// still references the old resources.
/// NOTE(review): the creation calls' return values are ignored, so a failed
/// recreate (e.g. while minimized) goes unnoticed here — confirm callers
/// tolerate that.
void
RecreateSwapchain(Vulkan* vk)
{
    vkDeviceWaitIdle(vk.device);

    Destroy(vk.swapchain, vk.present_images, vk.device);
    Destroy(vk, &vk.draw_image);
    Destroy(vk, &vk.depth_image);
    vkDestroyFramebuffer(vk.device, vk.framebuffer, null);

    // Rebuild in dependency order: swapchain first (fixes the new extent),
    // then the images sized from it, then the descriptor pointing at the
    // draw image, then the framebuffer.
    CreateSwapchain(vk);
    CreateDrawImages(vk);
    WriteDrawImageDesc(vk);
    CreateFramebuffer(vk);
}
|
|
|
|
/// Creates the VMA allocator targeting Vulkan 1.2 with buffer-device-address
/// support. Only the two loader entry points are handed over; VMA resolves
/// the rest of the function table itself.
bool
InitVMA(Vulkan* vk)
{
    Push(vk, SI.Vma);

    VmaVulkanFunctions loader_funcs;
    loader_funcs.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
    loader_funcs.vkGetDeviceProcAddr = vkGetDeviceProcAddr;

    VmaAllocatorCreateInfo info;
    info.flags = VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT;
    info.vulkanApiVersion = VK_MAKE_API_VERSION(0, 1, 2, 0);
    info.pVulkanFunctions = &loader_funcs;
    info.physicalDevice = vk.physical_device;
    info.device = vk.device;
    info.instance = vk.instance;

    VkResult result = vmaCreateAllocator(&info, &vk.vma);
    bool success = VkCheck("vmaCreateAllocator failure", result);

    return success;
}
|
|
|
|
/// Selects a physical device and creates the logical device plus its
/// graphics and transfer queues.
///
/// Selection: each enumerated device must offer a graphics-capable queue
/// with surface support, pass CheckDeviceProperties (API 1.3+, required
/// extensions, surface formats/present modes), and pass CheckDeviceFeatures.
/// A discrete GPU candidate is preferred over integrated (later integrated
/// devices are skipped once a discrete one is found); otherwise the LAST
/// qualifying device wins.
/// NOTE(review): the trailing `continue` in the selection loop is a no-op —
/// it looks like it was meant to stop the search (break) once a discrete
/// device with separate queues is found. Confirm intent before changing.
bool
InitDevice(Vulkan* vk)
{
    Push(vk, SI.Device);

    bool success = false;
    Arena* arena = &vk.frame_arenas[0];

    u32 count;
    vkEnumeratePhysicalDevices(vk.instance, &count, null);
    VkPhysicalDevice[] devices = AllocArray!(VkPhysicalDevice)(arena, count);
    vkEnumeratePhysicalDevices(vk.instance, &count, devices.ptr);

    VkPhysicalDevice physical_device = null;
    bool discrete_candidate = false;
    QueueInfo candidate = {
        gfx_index: -1,
        tfer_index: -1,
        single_queue: false,
    };

    foreach(dev; devices)
    {
        QueueInfo current = CheckQueueProperties(arena, dev, vk.surface);
        b32 discrete = false;

        // Reject: no presentable graphics queue.
        if (current.gfx_index < 0)
            continue;
        // Reject: missing API version / extensions / surface support.
        if (!CheckDeviceProperties(arena, dev, vk.surface, &discrete))
            continue;
        // Never downgrade from an already-found discrete GPU.
        if (discrete_candidate && !discrete)
            continue;
        // Reject: missing required device features.
        if (!CheckDeviceFeatures(dev))
            continue;

        discrete_candidate = cast(bool)discrete;
        candidate = current;
        physical_device = dev;

        // NOTE(review): no-op — `continue` at the end of the loop body.
        if (discrete_candidate && !candidate.single_queue)
            continue;
    }

    if (physical_device)
    {
        // One queue-create entry for graphics; a second for transfer when
        // the chosen families differ.
        VkDeviceQueueCreateInfo[2] queue_info;
        f32 priority = 1.0f;
        count = 1;

        queue_info[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
        queue_info[0].queueFamilyIndex = candidate.gfx_index;
        queue_info[0].queueCount = 1;
        queue_info[0].pQueuePriorities = &priority;
        queue_info[0].flags = 0;

        if (!candidate.single_queue)
        {
            queue_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
            queue_info[1].queueFamilyIndex = candidate.tfer_index;
            queue_info[1].queueCount = 1;
            queue_info[1].pQueuePriorities = &priority;
            queue_info[1].flags = 0;

            count += 1;
        }

        // Feature chain: features2 -> 1.1 features -> 1.2 features ->
        // synchronization2. These must stay a subset of what
        // CheckDeviceFeatures verified.
        VkPhysicalDeviceSynchronization2Features synchronization2 = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
            synchronization2: VK_TRUE,
        };

        VkPhysicalDeviceVulkan12Features features_12 = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
            pNext: &synchronization2,
            descriptorIndexing: VK_TRUE,
            bufferDeviceAddress: VK_TRUE,
            descriptorBindingUniformBufferUpdateAfterBind: VK_TRUE,
            descriptorBindingSampledImageUpdateAfterBind: VK_TRUE,
            descriptorBindingStorageImageUpdateAfterBind: VK_TRUE,
            descriptorBindingStorageBufferUpdateAfterBind: VK_TRUE,
            descriptorBindingStorageTexelBufferUpdateAfterBind: VK_TRUE,
            descriptorBindingPartiallyBound: VK_TRUE,
            shaderSampledImageArrayNonUniformIndexing: VK_TRUE,
            shaderUniformBufferArrayNonUniformIndexing: VK_TRUE,
            runtimeDescriptorArray: VK_TRUE,
            storageBuffer8BitAccess: VK_TRUE,
        };

        VkPhysicalDeviceVulkan11Features features_11 = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
            pNext: &features_12,
        };

        VkPhysicalDeviceFeatures2 features = {
            sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
            pNext: &features_11,
            features: {
                shaderUniformBufferArrayDynamicIndexing: VK_TRUE,
                shaderSampledImageArrayDynamicIndexing: VK_TRUE,
                shaderStorageBufferArrayDynamicIndexing: VK_TRUE,
                shaderStorageImageArrayDynamicIndexing: VK_TRUE,
                samplerAnisotropy: VK_TRUE,
                fragmentStoresAndAtomics: VK_TRUE,
            },
        };

        VkDeviceCreateInfo device_info = {
            sType: VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
            pNext: &features,
            ppEnabledExtensionNames: VK_DEVICE_EXTENSIONS.ptr,
            enabledExtensionCount: cast(u32)VK_DEVICE_EXTENSIONS.length,
            queueCreateInfoCount: count,
            pQueueCreateInfos: queue_info.ptr,
            // Features ride on pNext (features2); the legacy field must stay null.
            pEnabledFeatures: null,
        };

        VkResult result = vkCreateDevice(physical_device, &device_info, null, &vk.device);
        if (result != VK_SUCCESS)
        {
            Logf("vkCreateDevices failure: %s", VkResultStr(result));
        }
        else
        {
            LoadDeviceFunctions(vk);

            vkGetDeviceQueue(
                vk.device,
                candidate.gfx_index,
                0,
                &candidate.gfx_queue
            );

            if (!candidate.single_queue)
            {
                // Same family as graphics: take queue index 1; distinct
                // family: take its queue 0.
                vkGetDeviceQueue(
                    vk.device,
                    candidate.tfer_index,
                    candidate.tfer_index == candidate.gfx_index ? 1 : 0,
                    &candidate.tfer_queue
                );
            }
            else
            {
                // Single shared queue: transfer aliases graphics.
                candidate.tfer_queue = candidate.gfx_queue;
                candidate.tfer_index = candidate.gfx_index;
            }

            vk.physical_device = physical_device;
            vk.queues = candidate;

            success = true;
        }
    }

    return success;
}
|
|
|
|
/// True when the device advertises every core and Vulkan-1.2 feature this
/// renderer enables: dynamic + non-uniform descriptor indexing,
/// update-after-bind, partially-bound/runtime descriptor arrays,
/// buffer-device-address, 8-bit storage, timeline semaphores, anisotropy,
/// and fragment stores/atomics.
bool
CheckDeviceFeatures(VkPhysicalDevice device)
{
    VkPhysicalDeviceFeatures2 features2 = { sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
    VkPhysicalDeviceVulkan12Features features_12 = { sType: VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES };

    features2.pNext = &features_12;
    vkGetPhysicalDeviceFeatures2(device, &features2);

    VkPhysicalDeviceFeatures core = features2.features;

    bool core_ok =
        core.fragmentStoresAndAtomics &&
        core.shaderUniformBufferArrayDynamicIndexing &&
        core.shaderSampledImageArrayDynamicIndexing &&
        core.shaderStorageBufferArrayDynamicIndexing &&
        core.shaderStorageImageArrayDynamicIndexing &&
        core.samplerAnisotropy;

    bool v12_ok =
        features_12.descriptorIndexing &&
        features_12.bufferDeviceAddress &&
        features_12.descriptorBindingUniformBufferUpdateAfterBind &&
        features_12.descriptorBindingStorageTexelBufferUpdateAfterBind &&
        features_12.descriptorBindingSampledImageUpdateAfterBind &&
        features_12.descriptorBindingStorageImageUpdateAfterBind &&
        features_12.descriptorBindingStorageBufferUpdateAfterBind &&
        features_12.descriptorBindingPartiallyBound &&
        features_12.runtimeDescriptorArray &&
        features_12.shaderSampledImageArrayNonUniformIndexing &&
        features_12.shaderUniformBufferArrayNonUniformIndexing &&
        features_12.timelineSemaphore &&
        features_12.storageBuffer8BitAccess;

    return core_ok && v12_ok;
}
|
|
|
|
/// Accepts a device only if it reports API 1.3+, supports every extension in
/// VK_DEVICE_EXTENSIONS, and exposes at least one surface format and one
/// present mode for `surface`. On the extension-matching path, *discrete is
/// set to whether the device is a discrete GPU (left untouched on earlier
/// rejections).
bool
CheckDeviceProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface, b32* discrete)
{
    VkPhysicalDeviceProperties props;
    vkGetPhysicalDeviceProperties(device, &props);

    // Minimum supported API version: 1.3.
    if (VK_API_VERSION_MINOR(props.apiVersion) < 3)
    {
        return false;
    }

    u32 ext_count;
    vkEnumerateDeviceExtensionProperties(device, null, &ext_count, null);
    VkExtensionProperties[] ext_props = AllocArray!(VkExtensionProperties)(arena, ext_count);
    vkEnumerateDeviceExtensionProperties(device, null, &ext_count, ext_props.ptr);

    // Count how many required extensions the device exposes.
    i32 matched = 0;
    foreach(prop; ext_props)
    {
        foreach(required; VK_DEVICE_EXTENSIONS)
        {
            if (strcmp(cast(char*)prop.extensionName, required) == 0)
            {
                matched += 1;
                break;
            }
        }
    }

    if (matched != VK_DEVICE_EXTENSIONS.length)
    {
        return false;
    }

    // The surface must be usable at all: at least one format and one
    // present mode.
    u32 fmt_count, present_count;
    vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &fmt_count, null);
    vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_count, null);

    *discrete = props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;

    return fmt_count && present_count;
}
|
|
|
|
/// Inspects a device's queue families and picks a graphics family (must
/// support presenting to `surface`) plus a transfer family.
///
/// Fast path: a device with exactly one family exposing exactly one
/// transfer+compute+graphics queue is flagged single_queue, and both
/// indices collapse to 0.
///
/// Otherwise the transfer family is chosen by a ranked scan — later
/// iterations can only upgrade, never downgrade, the pick:
///   1. transfer + sparse, but NOT graphics/compute (dedicated transfer)
///   2. transfer + sparse (may also do graphics/compute)
///   3. compute without transfer capability
///   4. any compute family
/// Falls back to the graphics family when nothing matched.
/// NOTE(review): the ordering depends on `sparse`/`tfer_only` carrying
/// state across iterations, so a family seen earlier can win over a
/// "better" one seen later within the same tier — confirm intended.
QueueInfo
CheckQueueProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface)
{
    const u32 T_BIT = VK_QUEUE_TRANSFER_BIT;
    const u32 C_BIT = VK_QUEUE_COMPUTE_BIT;
    const u32 G_BIT = VK_QUEUE_GRAPHICS_BIT;
    const u32 S_BIT = VK_QUEUE_SPARSE_BINDING_BIT;

    // -1 indices mean "not found"; callers treat gfx_index < 0 as rejection.
    QueueInfo current = {
        gfx_index: -1,
        tfer_index: -1,
        single_queue: false,
    };

    u32 count;
    vkGetPhysicalDeviceQueueFamilyProperties(device, &count, null);
    VkQueueFamilyProperties[] properties = AllocArray!(VkQueueFamilyProperties)(arena, count);
    vkGetPhysicalDeviceQueueFamilyProperties(device, &count, properties.ptr);

    // Single family, single queue, all-capable: everything shares queue 0.
    if (count == 1 && properties[0].queueCount == 1 && BitEq(properties[0].queueFlags, T_BIT | C_BIT | G_BIT))
    {
        current.gfx_index = current.tfer_index = 0;
        current.single_queue = true;
    }
    else
    {
        bool sparse = false, tfer_only = false;

        foreach(i, prop; properties)
        {
            b32 surface_support;
            vkGetPhysicalDeviceSurfaceSupportKHR(device, cast(u32)i, surface, &surface_support);

            // First presentable graphics family wins; a family used for
            // graphics is not considered for transfer.
            if (current.gfx_index < 0 && surface_support && BitEq(prop.queueFlags, G_BIT))
            {
                current.gfx_index = cast(i32)i;
                continue;
            }

            // Tier 1: dedicated transfer + sparse (no graphics/compute).
            if (BitEq(prop.queueFlags, T_BIT | S_BIT) && !BitEq(prop.queueFlags, G_BIT | C_BIT))
            {
                sparse = true;
                tfer_only = true;
                current.tfer_index = cast(i32)i;
                continue;
            }

            // Tier 2: transfer + sparse, possibly shared with other work.
            if (!(sparse && tfer_only) && BitEq(prop.queueFlags, T_BIT | S_BIT))
            {
                sparse = true;
                current.tfer_index = cast(i32)i;
                continue;
            }

            // Tier 3: compute-only family without an explicit transfer bit
            // (compute implies transfer capability in Vulkan).
            if (!sparse && !BitEq(prop.queueFlags, T_BIT) && BitEq(prop.queueFlags, C_BIT))
            {
                tfer_only = true;
                current.tfer_index = cast(i32)i;
                continue;
            }

            // Tier 4: any compute family, when nothing better was seen.
            if (!sparse && !tfer_only && BitEq(prop.queueFlags, C_BIT))
            {
                current.tfer_index = cast(i32)i;
            }
        }

        // No separate transfer family: share the graphics family.
        if (current.tfer_index < 0)
        {
            current.tfer_index = current.gfx_index;
        }
    }

    return current;
}
|
|
|
|
/// Records an initialization step on the renderer's cleanup list. PushFront
/// means teardown observes steps most-recent-first (reverse init order).
pragma(inline): void
Push(Vulkan* vk, StepInitialized step)
{
    Node!(SI)* entry = Alloc!(Node!(SI));
    entry.value = step;
    PushFront(&vk.cleanup_list, entry, null);
}
|
|
|
|
/// Releases the renderer's CPU-side memory: every per-frame arena first,
/// then the long-lived arena.
void
DestroyRenderer(Vulkan* vk)
{
    foreach(ref frame_arena; vk.frame_arenas)
    {
        Free(&frame_arena);
    }

    Free(&vk.arena);
}
|
|
|
|
/// Destroys the Vulkan instance; a null handle is a no-op.
void
Destroy(VkInstance instance)
{
    if (!instance)
    {
        return;
    }

    vkDestroyInstance(instance, null);
}
|
|
|
|
/// Destroys the debug-utils messenger. Only compiled into debug builds
/// (matching EnableVLayers, which only creates it there); null is a no-op.
void
Destroy(VkDebugUtilsMessengerEXT dbg, VkInstance instance)
{
    debug
    {
        if (!dbg)
        {
            return;
        }

        vkDestroyDebugUtilsMessengerEXT(instance, dbg, null);
    }
}
|
|
|
|
/// Destroys the window surface; a null handle is a no-op.
void
Destroy(VkSurfaceKHR surface, VkInstance instance)
{
    if (!surface)
    {
        return;
    }

    vkDestroySurfaceKHR(instance, surface, null);
}
|
|
|
|
/// Destroys the logical device; a null handle is a no-op.
void
Destroy(VkDevice device)
{
    if (!device)
    {
        return;
    }

    vkDestroyDevice(device, null);
}
|
|
|
|
/// Destroys the VMA allocator; a null handle is a no-op.
void
Destroy(VmaAllocator vma)
{
    if (!vma)
    {
        return;
    }

    vmaDestroyAllocator(vma);
}
|
|
|
|
/// Destroys each swapchain image view, then the swapchain itself. The
/// VkImages are owned by the swapchain and are not destroyed individually.
void
Destroy(VkSwapchainKHR swapchain, ImageView[] views, VkDevice device)
{
    foreach(ref entry; views)
    {
        if (entry.view)
        {
            vkDestroyImageView(device, entry.view, null);
        }
    }

    if (swapchain)
    {
        vkDestroySwapchainKHR(device, swapchain, null);
    }
}
|
|
|
|
/// Releases an ImageView pair: the view through the device, the image (with
/// its allocation) through VMA. Either half may be null independently.
void
Destroy(Vulkan* vk, ImageView* view)
{
    if (view.view)
    {
        vkDestroyImageView(vk.device, view.view, null);
    }

    if (view.image)
    {
        vmaDestroyImage(vk.vma, view.image, view.alloc);
    }
}
|
|
|
|
/// Tears down the descriptor machinery: sampler, pipeline layout, every set
/// layout, and finally the descriptor pool (which frees the sets allocated
/// from it). Null handles are skipped.
void
Destroy(VkDescriptorPool pool, VkDescriptorSetLayout[] layouts, VkPipelineLayout pipeline_layout, VkSampler sampler, VkDevice device)
{
    if (sampler)
    {
        vkDestroySampler(device, sampler, null);
    }

    if (pipeline_layout)
    {
        vkDestroyPipelineLayout(device, pipeline_layout, null);
    }

    foreach(set_layout; layouts)
    {
        if (set_layout)
        {
            vkDestroyDescriptorSetLayout(device, set_layout, null);
        }
    }

    if (pool)
    {
        vkDestroyDescriptorPool(device, pool, null);
    }
}
|
|
|
|
/// Tears down everything InitFrameStructures created: immediate-submit
/// objects, compute objects, submit semaphores, and the per-frame pools,
/// buffers, fences, and acquire semaphores. Every handle is null-checked so
/// a partially-initialized renderer can be destroyed safely.
void
DestroyFS(Vulkan* vk)
{
    if (vk.imm_fence)
    {
        vkDestroyFence(vk.device, vk.imm_fence, null);
    }

    if (vk.imm_cmd)
    {
        vkFreeCommandBuffers(vk.device, vk.imm_pool, 1, &vk.imm_cmd);
    }

    if (vk.imm_pool)
    {
        vkDestroyCommandPool(vk.device, vk.imm_pool, null);
    }

    if (vk.comp_cmd)
    {
        vkFreeCommandBuffers(vk.device, vk.comp_cmd_pool, 1, &vk.comp_cmd);
    }

    if (vk.comp_cmd_pool)
    {
        vkDestroyCommandPool(vk.device, vk.comp_cmd_pool, null);
    }

    if (vk.comp_fence)
    {
        vkDestroyFence(vk.device, vk.comp_fence, null);
    }

    // One submit semaphore per swapchain image.
    foreach(sem; vk.submit_sems)
    {
        if (sem)
        {
            vkDestroySemaphore(vk.device, sem, null);
        }
    }

    foreach(i; 0 .. FRAME_OVERLAP)
    {
        if (vk.render_fences[i])
        {
            vkDestroyFence(vk.device, vk.render_fences[i], null);
        }

        // Guarded on the pool: a command buffer cannot exist (or be freed)
        // without its pool.
        if (vk.cmd_pools[i])
        {
            vkFreeCommandBuffers(vk.device, vk.cmd_pools[i], 1, &vk.cmds[i]);
        }

        if (vk.cmd_pools[i])
        {
            vkDestroyCommandPool(vk.device, vk.cmd_pools[i], null);
        }

        if (vk.acquire_sems[i])
        {
            vkDestroySemaphore(vk.device, vk.acquire_sems[i], null);
        }
    }
}
|
|
|
|
/// Creates the VkInstance (API 1.2). Probes for the Khronos validation
/// layer and, when found in a debug build, enables it plus the debug-utils
/// extension; optionally chains the debug-printf validation feature.
/// Sets the global g_VLAYER_SUPPORT flag as a side effect.
/// NOTE(review): BUILD_DEBUG is only declared inside a `debug` block at the
/// top of the file — the references below will not compile in non-debug
/// builds unless an `else` branch defines it; confirm.
bool
InitInstance(Vulkan* vk)
{
    Push(vk, SI.Instance);

    bool success = true;
    Arena* arena = &vk.frame_arenas[0];

    u32 count;
    vkEnumerateInstanceLayerProperties(&count, null);
    VkLayerProperties[] layers = AllocArray!(VkLayerProperties)(arena, count);
    vkEnumerateInstanceLayerProperties(&count, layers.ptr);

    // Record whether the Khronos validation layer is installed.
    foreach(i, layer; layers)
    {
        if (strcmp(cast(char*)&layer.layerName, "VK_LAYER_KHRONOS_validation") == 0)
        {
            g_VLAYER_SUPPORT = true;
            break;
        }
    }

    // Validation layer + debug-utils extension only in debug builds with
    // layer support; plain surface extensions otherwise.
    const char*[] instance_layers = g_VLAYER_SUPPORT && BUILD_DEBUG ? VK_INSTANCE_LAYERS_DEBUG : VK_INSTANCE_LAYERS;
    const char*[] instance_ext = g_VLAYER_SUPPORT && BUILD_DEBUG ? VK_INSTANCE_EXT_DEBUG : VK_INSTANCE_EXT;

    VkApplicationInfo app_info = {
        sType: VK_STRUCTURE_TYPE_APPLICATION_INFO,
        pApplicationName: "Video Game",
        applicationVersion: VK_MAKE_API_VERSION(0, 0, 0, 1),
        pEngineName: "Gears",
        engineVersion: VK_MAKE_API_VERSION(0, 0, 0, 1),
        apiVersion: VK_MAKE_API_VERSION(0, 1, 2, 0),
    };

    VkInstanceCreateInfo instance_info = {
        sType: VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        pApplicationInfo: &app_info,
        enabledLayerCount: cast(u32)instance_layers.length,
        ppEnabledLayerNames: instance_layers.ptr,
        enabledExtensionCount: cast(u32)instance_ext.length,
        ppEnabledExtensionNames: instance_ext.ptr,
    };

    // Stack locals are fine here: the pNext chain is only read by
    // vkCreateInstance below, within this scope.
    VkValidationFeatureEnableEXT validation_enable = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT;

    VkValidationFeaturesEXT validation_features = {
        sType: VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
        enabledValidationFeatureCount: 1,
        pEnabledValidationFeatures: &validation_enable,
    };

    debug
    {
        if (g_VLAYER_SUPPORT && g_DEBUG_PRINTF)
        {
            instance_info.pNext = &validation_features;
        }
    }

    VkResult result = vkCreateInstance(&instance_info, null, &vk.instance);
    success = VkCheck("vkCreateInstance failure", result);

    return success;
}
|
|
|
|
/// Creates the platform window surface: XCB on Linux, Win32 on Windows.
/// Returns true on success.
///
/// Fixed (Windows): PlatformHandles declares the fields `hinstance` and
/// `hwnd`, but this function read `.instance` and `.handle`, which do not
/// exist — the Windows path could not compile.
bool
InitSurface(Vulkan* vk)
{
    Push(vk, SI.Surface);

    version(linux)
    {
        VkXcbSurfaceCreateInfoKHR surface_info = {
            sType: VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
            connection: vk.platform_handles.conn,
            window: vk.platform_handles.window,
        };

        VkResult result = vkCreateXcbSurfaceKHR(vk.instance, &surface_info, null, &vk.surface);
    }

    version(Windows)
    {
        VkWin32SurfaceCreateInfoKHR surface_info = {
            sType: VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
            hinstance: vk.platform_handles.hinstance,
            hwnd: vk.platform_handles.hwnd,
        };

        VkResult result = vkCreateWin32SurfaceKHR(vk.instance, &surface_info, null, &vk.surface);
    }

    // `result` is visible here: version blocks do not introduce a new scope.
    bool success = VkCheck("InitSurface failure", result);

    return success;
}
|
|
|
|
/// Installs the debug-utils messenger when validation layers are available.
/// Compiles to nothing outside debug builds. Failure to create the
/// messenger is logged but never fatal — rendering continues without
/// validation output.
void
EnableVLayers(Vulkan* vk)
{
    debug
    {
        Push(vk, SI.Debug);

        if (!g_VLAYER_SUPPORT)
        {
            Logf("EnableVLayers warning: Not supported on current device, continuing without");
            return;
        }

        // Warnings and errors always; info level only when debug-printf is
        // on, since printf output arrives as info-severity messages.
        u32 severity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                       VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
        if (g_DEBUG_PRINTF)
        {
            severity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
        }

        VkDebugUtilsMessengerCreateInfoEXT messenger_info = {
            sType: VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
            messageSeverity: severity,
            messageType: VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                         VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
            pfnUserCallback: cast(PFN_vkDebugUtilsMessengerCallbackEXT)&DebugCallback,
        };

        VkResult result = vkCreateDebugUtilsMessengerEXT(vk.instance, &messenger_info, null, &vk.dbg_msg);
        if (result != VK_SUCCESS)
        {
            Logf("EnableVLayers failed to initialize, will continue without validation: %s", VkResultStr(result));
        }
    }
}
|
|
|
|
/// Logs the GPU disassembly of one shader stage of a pipeline through the
/// VK_AMD_shader_info extension. Compiled in only for AMD_GPU debug builds;
/// a no-op everywhere else. Uses the standard two-call pattern: query the
/// size, allocate from the current frame arena, then fetch the text.
/// NOTE(review): the second vkGetShaderInfoAMD result is ignored — a
/// failure there would log whatever is in the buffer.
void
PrintShaderDisassembly(Vulkan* vk, Pipeline pipeline_id, VkShaderStageFlagBits stage)
{
    version(AMD_GPU)
    {
        // Pipeline is an index (u32) into the renderer's pipeline table.
        PipelineHandles* pipeline = vk.pipeline_handles.ptr + pipeline_id;

        debug
        {
            u64 size;
            VkResult result = vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, null);
            if (result == VK_SUCCESS)
            {
                u8[] buf = AllocArray!(u8)(&vk.frame_arenas[vk.frame_index], size);
                vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, buf.ptr);
                Logf("DISASSEMBLY:\n%r", buf);
            }
        }
    }
}
|