fix formatting

Matthew 2025-09-15 05:17:35 +10:00
parent 65ac576cb0
commit 7a77505fc1
5 changed files with 136 additions and 136 deletions


@@ -24,19 +24,19 @@ void main()
uint x = gl_GlobalInvocationID.x;
uint y = gl_GlobalInvocationID.y;
-if (x > PC.x || y > PC.y)
+if(x > PC.x || y > PC.y)
{
return;
}
-if (CHANNELS == 1)
+if(CHANNELS == 1)
{
uint index = x + y * PC.x;
vec4 col = vec4(vec3(uint(src[index]) / 255.0), 1.0);
imageStore(dst, ivec2(x, y), col);
}
-else if (CHANNELS == 2)
+else if(CHANNELS == 2)
{
uint index = (x + y * PC.x) * 2;
@@ -45,7 +45,7 @@ void main()
vec4 col = vec4(f, f, f, a);
imageStore(dst, ivec2(x, y), col);
}
-else if (CHANNELS == 3)
+else if(CHANNELS == 3)
{
uint index = (x + y * PC.x) * 3;

vulkan.d (248 changed lines)

@@ -482,26 +482,26 @@ Init(PlatformHandles platform_handles, u64 permanent_mem, u64 frame_mem)
success = LoadGlobalFunctions();
-if (success) success = InitInstance(&vk);
+if(success) success = InitInstance(&vk);
-if (success)
+if(success)
{
LoadInstanceFunctions(&vk);
EnableVLayers(&vk);
}
-if (success) success = InitSurface(&vk);
-if (success) success = InitDevice(&vk);
-if (success) success = InitVMA(&vk);
-if (success) success = CreateSwapchain(&vk);
-if (success) success = CreateDrawImages(&vk);
-if (success) success = InitFrameStructures(&vk);
-if (success) InitDescriptors(&vk);
-if (success) success = InitGlobalDescSet(&vk);
-if (success) InitPipelines(&vk);
-if (success) InitBuffers(&vk);
-if (success) InitConversionPipeline(&vk);
-if (success) InitFramebufferAndRenderPass(&vk);
+if(success) success = InitSurface(&vk);
+if(success) success = InitDevice(&vk);
+if(success) success = InitVMA(&vk);
+if(success) success = CreateSwapchain(&vk);
+if(success) success = CreateDrawImages(&vk);
+if(success) success = InitFrameStructures(&vk);
+if(success) InitDescriptors(&vk);
+if(success) success = InitGlobalDescSet(&vk);
+if(success) InitPipelines(&vk);
+if(success) InitBuffers(&vk);
+if(success) InitConversionPipeline(&vk);
+if(success) InitFramebufferAndRenderPass(&vk);
assert(success, "Error initializing vulkan");
@@ -545,7 +545,7 @@ AllocDescSet(Vulkan* vk, DescSetLayout layout, u32 dynamic_count = 0)
dynamic_count: dynamic_count,
};
VkResult result = vkAllocateDescriptorSets(vk.device, &alloc_info, &set.handle);
-if (result == VK_ERROR_OUT_OF_POOL_MEMORY || result == VK_ERROR_FRAGMENTED_POOL)
+if(result == VK_ERROR_OUT_OF_POOL_MEMORY || result == VK_ERROR_FRAGMENTED_POOL)
{
PushDescriptorPool(vk);
@@ -559,19 +559,19 @@ AllocDescSet(Vulkan* vk, DescSetLayout layout, u32 dynamic_count = 0)
}
PipelineLayout
-CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute = false) if (is(T: DescSetLayout) || is(T: DescSetLayout[]))
+CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute = false) if(is(T: DescSetLayout) || is(T: DescSetLayout[]))
{
VkShaderStageFlagBits stage = (compute ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
DescSetLayout[] desc_layouts;
-static if (is(T: DescSetLayout))
+static if(is(T: DescSetLayout))
{
desc_layouts = AllocArray!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], 2);
desc_layouts[0] = vk.global_set_layout;
desc_layouts[1] = layouts;
}
-else static if (is(T: DescSetLayout[]))
+else static if(is(T: DescSetLayout[]))
{
desc_layouts = AllocArray!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], layouts.length + 1);
desc_layouts[0] = vk.global_set_layout;
@@ -663,7 +663,7 @@ CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visib
flags: VMA_ALLOCATION_CREATE_MAPPED_BIT,
};
-if (host_visible)
+if(host_visible)
{
alloc_info.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
alloc_info.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
@@ -675,7 +675,7 @@ CreateBuffer(Vulkan* vk, Buffer* buf, BufferType type, u64 size, bool host_visib
}
u32[2] indices = [vk.queues.gfx_index, vk.queues.tfer_index];
-if (vk.queues.gfx_index != vk.queues.tfer_index)
+if(vk.queues.gfx_index != vk.queues.tfer_index)
{
buffer_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
buffer_info.queueFamilyIndexCount = 2;
@@ -752,11 +752,11 @@ BeginFrame(Vulkan* vk)
VkCheckA("BeginFrame failure: vkResetFences error", result);
result = vkAcquireNextImageKHR(vk.device, vk.swapchain, 1000000000, vk.acquire_sems[vk.frame_index], null, &vk.image_index);
-if (result == VK_ERROR_OUT_OF_DATE_KHR)
+if(result == VK_ERROR_OUT_OF_DATE_KHR)
{
RecreateSwapchain(vk);
}
-else if (result != VK_SUBOPTIMAL_KHR)
+else if(result != VK_SUBOPTIMAL_KHR)
{
VkCheckA("BeginFrame failure: vkAcquireNextImageKHR error", result);
}
@@ -828,7 +828,7 @@ ResizeDrawImageIfNeeded(Vulkan* vk, ImageView* view)
{
u32[2] ext = GetExtent(vk);
-if (view.w != ext[0] || view.h != ext[1])
+if(view.w != ext[0] || view.h != ext[1])
{
Destroy(vk, view);
CreateImageView(vk, view, ext[0], ext[1], view.format, view.usage, view.depth_image);
@@ -938,7 +938,7 @@ SubmitAndPresent(Vulkan* vk)
};
result = vkQueuePresentKHR(vk.queues.gfx_queue, &present_info);
-if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR)
+if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR)
{
RecreateSwapchain(vk);
}
@@ -966,19 +966,19 @@ ImmSubmitStart(Vulkan* vk)
VkResult result = vkWaitForFences(vk.device, 1, &vk.imm_fence, true, 999999999);
bool success = VkCheck("ImmSubmit failure: vkWaitForFences error", result);
-if (success)
+if(success)
{
result = vkResetFences(vk.device, 1, &vk.imm_fence);
success = VkCheck("ImmSubmit failure: vkResetFences error", result);
}
-if (success)
+if(success)
{
result = vkResetCommandBuffer(vk.imm_cmd, 0);
success = VkCheck("ImmSubmit failure: vkResetCommandBuffer error", result);
}
-if (success)
+if(success)
{
VkCommandBufferBeginInfo cmd_info = {
sType: VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
@@ -1015,7 +1015,7 @@ ImmSubmit(Vulkan* vk, Image* image, VkBufferImageCopy copy, void function(Vulkan
{
bool success = ImmSubmitStart(vk);
-if (success)
+if(success)
{
fn(vk, image, copy);
@@ -1023,7 +1023,7 @@ ImmSubmit(Vulkan* vk, Image* image, VkBufferImageCopy copy, void function(Vulkan
success = VkCheck("ImmSubmit failure: vkEndCommandBuffer error", result);
}
-if (success)
+if(success)
{
success = ImmSubmitFinish(vk);
}
@@ -1036,7 +1036,7 @@ ImmSubmit(Vulkan* vk, Buffer* buf, VkBufferCopy copy, void function(Vulkan*, Buf
{
bool success = ImmSubmitStart(vk);
-if (success)
+if(success)
{
fn(vk, buf, copy);
@@ -1044,7 +1044,7 @@ ImmSubmit(Vulkan* vk, Buffer* buf, VkBufferCopy copy, void function(Vulkan*, Buf
success = VkCheck("ImmSubmit failure: vkEndCommandBuffer error", result);
}
-if (success)
+if(success)
{
success = ImmSubmitFinish(vk);
}
@@ -1071,7 +1071,7 @@ CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, u32 ch, u8[] data)
{
CreateImageView(vk, view, w, h, FMT.RGBA_UNORM, IU.Texture);
-if (ch == 4)
+if(ch == 4)
{
assert(Transfer(vk, view, data, w, h), "CreateImageView failure: Image Transfer error");
}
@@ -1218,7 +1218,7 @@ CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format, ImageU
};
u32[2] indices = [vk.gfx_index, vk.tfer_index];
-if (vk.gfx_index != vk.tfer_index)
+if(vk.gfx_index != vk.tfer_index)
{
image_info.sharingMode = VK_SHARING_MODE_CONCURRENT;
image_info.queueFamilyIndexCount = 2;
@@ -1308,10 +1308,10 @@ Transfer(Vulkan* vk, Buffer* buf, u8[] data)
u64 copied = 0;
while(copied != data.length && success)
{
-if (copied != 0)
+if(copied != 0)
{
success = TransferReady(vk);
-if (!success)
+if(!success)
{
break;
}
@@ -1350,7 +1350,7 @@ Transfer(T)(Vulkan* vk, Buffer* buf, T* ptr)
assert(T.sizeof < vk.transfer_buf.data.length, "Transfer failure: structure size is too large");
bool success = TransferReady(vk);
-if (success)
+if(success)
{
memcpy(vk.transfer_buf.data.ptr, ptr, T.sizeof);
@@ -1495,7 +1495,7 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false)
BindPipeline(vk, cmd, pipeline);
u32[] offsets;
-if (vk.global_set.dynamic_count > 0)
+if(vk.global_set.dynamic_count > 0)
{
offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], vk.global_set.dynamic_count);
}
@@ -1520,7 +1520,7 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets, bool compute = false)
offset_count += set.dynamic_count;
}
-if (offset_count > 0)
+if(offset_count > 0)
{
offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], offset_count);
}
@@ -1557,7 +1557,7 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set, bool compute = false)
BindPipeline(vk, cmd, pipeline);
u32[] offsets;
-if (vk.global_set.dynamic_count > 0)
+if(vk.global_set.dynamic_count > 0)
{
offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], vk.global_set.dynamic_count);
}
@@ -1573,7 +1573,7 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set, bool compute = false)
offsets.ptr
);
-if (set.dynamic_count > 0)
+if(set.dynamic_count > 0)
{
offsets = AllocArray!(u32)(&vk.frame_arenas[vk.frame_index], set.dynamic_count);
}
@@ -1880,7 +1880,7 @@ CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b
Destroy(vk, vert_module);
}
-if (success)
+if(success)
{
__traits(getMember, shader_info.ptr + 0, "module") = frag_module;
__traits(getMember, shader_info.ptr + 1, "module") = vert_module;
@@ -1892,7 +1892,7 @@ CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b
pData: build_info.vert_spec.data,
};
-if (build_info.vert_spec.entries.length > 0)
+if(build_info.vert_spec.entries.length > 0)
{
shader_info[0].pSpecializationInfo = &vert_spec_info;
}
@@ -1904,7 +1904,7 @@ CreateGraphicsPipeline(Vulkan* vk, Pipeline* pipeline_handle, GfxPipelineInfo* b
pData: build_info.frag_spec.data,
};
-if (build_info.frag_spec.entries.length > 0)
+if(build_info.frag_spec.entries.length > 0)
{
shader_info[1].pSpecializationInfo = &frag_spec_info;
}
@@ -1961,7 +1961,7 @@ CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info)
pData: comp_info.spec.data,
};
-if (comp_info.spec.entries.length > 0)
+if(comp_info.spec.entries.length > 0)
{
info.stage.pSpecializationInfo = &spec_info;
}
@@ -2091,7 +2091,7 @@ DestroyDescriptorPools(Vulkan* vk)
Node!(VkDescriptorPool)* node = vk.full_pools.first;
for(;;)
{
-if (node == null)
+if(node == null)
{
break;
}
@@ -2105,27 +2105,27 @@ DestroyDescriptorPools(Vulkan* vk)
void
DestroyPipelines(Vulkan* vk)
{
-if (vk.conv_pipeline_layout)
+if(vk.conv_pipeline_layout)
{
vkDestroyPipelineLayout(vk.device, vk.conv_pipeline_layout, null);
}
-if (vk.conv_desc_layout)
+if(vk.conv_desc_layout)
{
vkDestroyDescriptorSetLayout(vk.device, vk.conv_desc_layout, null);
}
-if (vk.r_to_rgba_pipeline)
+if(vk.r_to_rgba_pipeline)
{
vkDestroyPipeline(vk.device, vk.pipeline_handles[vk.r_to_rgba_pipeline].handle, null);
}
-if (vk.rg_to_rgba_pipeline)
+if(vk.rg_to_rgba_pipeline)
{
vkDestroyPipeline(vk.device, vk.pipeline_handles[vk.rg_to_rgba_pipeline].handle, null);
}
-if (vk.rgb_to_rgba_pipeline)
+if(vk.rgb_to_rgba_pipeline)
{
vkDestroyPipeline(vk.device, vk.pipeline_handles[vk.rgb_to_rgba_pipeline].handle, null);
}
@@ -2177,7 +2177,7 @@ PushDescriptorPool(Vulkan* vk)
bool success = VkCheck("vkCreateDescriptorPool failure", result);
assert(success, "vkCreateDescriptorPool error");
-if (vk.active_pool == null || vk.active_pool == VK_NULL_HANDLE)
+if(vk.active_pool == null || vk.active_pool == VK_NULL_HANDLE)
{
Node!(VkDescriptorPool)* node = Alloc!(Node!(VkDescriptorPool));
node.value = vk.active_pool;
@@ -2210,7 +2210,7 @@ InitGlobalDescSet(Vulkan* vk)
VkResult result = vkCreateSampler(vk.device, &sampler_info, null, &vk.nearest_sampler);
bool success = VkCheck("vkCreateSampler failure", result);
-if (success)
+if(success)
{
DescLayoutBinding[2] layout_bindings = [
{ binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
@@ -2400,15 +2400,15 @@ VkCheck(string message, VkResult result)
bool success = true;
// TODO: Handle error cases that can be handled
-if (result == VK_ERROR_OUT_OF_DEVICE_MEMORY)
+if(result == VK_ERROR_OUT_OF_DEVICE_MEMORY)
{
assert(false, "Handle VK_ERROR_OUT_OF_DEVICE_MEMORY");
}
-else if (result == VK_ERROR_OUT_OF_HOST_MEMORY)
+else if(result == VK_ERROR_OUT_OF_HOST_MEMORY)
{
assert(false, "Handle VK_ERROR_OUT_OF_HOST_MEMORY");
}
-else if (result != VK_SUCCESS)
+else if(result != VK_SUCCESS)
{
success = false;
char[512] buf;
@@ -2457,7 +2457,7 @@ InitFrameStructures(Vulkan* vk)
foreach(i; 0 .. sem_count)
{
-if (success)
+if(success)
{
VkResult result = vkCreateSemaphore(vk.device, &sem_info, null, vk.submit_sems.ptr + i);
success = VkCheck("vkCreateSemaphore failure", result);
@@ -2468,68 +2468,68 @@ InitFrameStructures(Vulkan* vk)
{
VkResult result;
-if (success)
+if(success)
{
pool_info.queueFamilyIndex = vk.gfx_index;
result = vkCreateCommandPool(vk.device, &pool_info, null, vk.cmd_pools.ptr + i);
success = VkCheck("vkCreateCommandPool failure", result);
}
-if (success)
+if(success)
{
cmd_info.commandPool = vk.cmd_pools[i];
result = vkAllocateCommandBuffers(vk.device, &cmd_info, vk.cmds.ptr + i);
success = VkCheck("vkAllocateCommandBuffers failure", result);
}
-if (success)
+if(success)
{
result = vkCreateFence(vk.device, &fence_info, null, vk.render_fences.ptr + i);
success = VkCheck("vkCreateFence failure", result);
}
-if (success)
+if(success)
{
result = vkCreateSemaphore(vk.device, &sem_info, null, vk.acquire_sems.ptr + i);
success = VkCheck("vkCreateSemaphore failure", result);
}
}
-if (success)
+if(success)
{
pool_info.queueFamilyIndex = vk.tfer_index;
VkResult result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.imm_pool);
success = VkCheck("vkCreateCommandPool failure", result);
}
-if (success)
+if(success)
{
cmd_info.commandPool = vk.imm_pool;
VkResult result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.imm_cmd);
success = VkCheck("vkAllocateCommandBuffers failure", result);
}
-if (success)
+if(success)
{
VkResult result = vkCreateFence(vk.device, &fence_info, null, &vk.imm_fence);
success = VkCheck("vkCreateFence failure", result);
}
-if (success)
+if(success)
{
pool_info.queueFamilyIndex = vk.gfx_index;
VkResult result = vkCreateCommandPool(vk.device, &pool_info, null, &vk.comp_cmd_pool);
success = VkCheck("vkCreateCommandPool failure", result);
}
-if (success)
+if(success)
{
cmd_info.commandPool = vk.comp_cmd_pool;
VkResult result = vkAllocateCommandBuffers(vk.device, &cmd_info, &vk.comp_cmd);
success = VkCheck("vkCreateCommandPool failure", result);
}
-if (success)
+if(success)
{
VkResult result = vkCreateFence(vk.device, &fence_info, null, &vk.comp_fence);
success = VkCheck("vkCreateFence failure", result);
@@ -2554,12 +2554,12 @@ GetDrawImageFormat(Vulkan* vk)
0,
&props
);
-if (result == VK_ERROR_FORMAT_NOT_SUPPORTED)
+if(result == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
continue;
}
-if (result == VK_SUCCESS)
+if(result == VK_SUCCESS)
{
selected_format = format;
break;
@@ -2606,7 +2606,7 @@ SelectSwapchainFormats(Vulkan* vk)
VkPresentModeKHR present_mode = VK_PRESENT_MODE_FIFO_KHR;
foreach(mode; modes)
{
-if (mode == VK_PRESENT_MODE_MAILBOX_KHR)
+if(mode == VK_PRESENT_MODE_MAILBOX_KHR)
{
present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
break;
@@ -2616,7 +2616,7 @@ SelectSwapchainFormats(Vulkan* vk)
VkSurfaceFormatKHR surface_format = formats[0];
foreach(format; formats)
{
-if (format.format == VK_FORMAT_B8G8R8A8_UNORM)
+if(format.format == VK_FORMAT_B8G8R8A8_UNORM)
{
surface_format = format;
break;
@@ -2639,7 +2639,7 @@ CreateSwapchain(Vulkan* vk)
vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vk.physical_device, vk.surface, &cap);
static bool initialized = false;
-if (!initialized)
+if(!initialized)
{
SelectSwapchainFormats(vk);
}
@@ -2711,7 +2711,7 @@ CreateSwapchain(Vulkan* vk)
vk.swapchain_extent.height = info.imageExtent.height;
vk.swapchain_extent.depth = 1;
-if (!initialized && success)
+if(!initialized && success)
{
initialized = true;
}
@@ -2723,11 +2723,11 @@ void
CheckAndRecreateSwapchain(Vulkan* vk)
{
VkResult result = vkGetSwapchainStatusKHR(vk.device, vk.swapchain);
-if (result == VK_ERROR_OUT_OF_DATE_KHR)
+if(result == VK_ERROR_OUT_OF_DATE_KHR)
{
RecreateSwapchain(vk);
}
-else if (result != VK_SUBOPTIMAL_KHR)
+else if(result != VK_SUBOPTIMAL_KHR)
{
VkCheckA("BeginFrame failure: vkAcquireNextImageKHR error", result);
}
@@ -2802,24 +2802,24 @@ InitDevice(Vulkan* vk)
QueueInfo current = CheckQueueProperties(arena, dev, vk.surface);
b32 discrete = false;
-if (current.gfx_index < 0)
+if(current.gfx_index < 0)
continue;
-if (!CheckDeviceProperties(arena, dev, vk.surface, &discrete))
+if(!CheckDeviceProperties(arena, dev, vk.surface, &discrete))
continue;
-if (discrete_candidate && !discrete)
+if(discrete_candidate && !discrete)
continue;
-if (!CheckDeviceFeatures(dev))
+if(!CheckDeviceFeatures(dev))
continue;
discrete_candidate = cast(bool)discrete;
candidate = current;
physical_device = dev;
-if (discrete_candidate && !candidate.single_queue)
+if(discrete_candidate && !candidate.single_queue)
continue;
}
-if (physical_device)
+if(physical_device)
{
VkDeviceQueueCreateInfo[2] queue_info;
f32 priority = 1.0f;
@@ -2831,7 +2831,7 @@ InitDevice(Vulkan* vk)
queue_info[0].pQueuePriorities = &priority;
queue_info[0].flags = 0;
-if (!candidate.single_queue)
+if(!candidate.single_queue)
{
queue_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
queue_info[1].queueFamilyIndex = candidate.tfer_index;
@@ -2893,7 +2893,7 @@ InitDevice(Vulkan* vk)
};
VkResult result = vkCreateDevice(physical_device, &device_info, null, &vk.device);
-if (result != VK_SUCCESS)
+if(result != VK_SUCCESS)
{
Logf("vkCreateDevices failure: %s", VkResultStr(result));
}
@@ -2908,7 +2908,7 @@ InitDevice(Vulkan* vk)
&candidate.gfx_queue
);
-if (!candidate.single_queue)
+if(!candidate.single_queue)
{
vkGetDeviceQueue(
vk.device,
@@ -2977,7 +2977,7 @@ CheckDeviceProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surfac
VkPhysicalDeviceProperties props;
vkGetPhysicalDeviceProperties(device, &props);
-if (VK_API_VERSION_MINOR(props.apiVersion) >= 3)
+if(VK_API_VERSION_MINOR(props.apiVersion) >= 3)
{
u32 ext_count;
vkEnumerateDeviceExtensionProperties(device, null, &ext_count, null);
@@ -2989,7 +2989,7 @@ CheckDeviceProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surfac
{
foreach(ext; VK_DEVICE_EXTENSIONS)
{
-if (strcmp(cast(char*)prop.extensionName, ext) == 0)
+if(strcmp(cast(char*)prop.extensionName, ext) == 0)
{
matched += 1;
break;
@@ -2997,7 +2997,7 @@ CheckDeviceProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surfac
}
}
-if (matched == VK_DEVICE_EXTENSIONS.length)
+if(matched == VK_DEVICE_EXTENSIONS.length)
{
u32 fmt_count, present_count;
vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &fmt_count, null);
@@ -3030,7 +3030,7 @@ CheckQueueProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface
VkQueueFamilyProperties[] properties = AllocArray!(VkQueueFamilyProperties)(arena, count);
vkGetPhysicalDeviceQueueFamilyProperties(device, &count, properties.ptr);
-if (count == 1 && properties[0].queueCount == 1 && BitEq(properties[0].queueFlags, T_BIT | C_BIT | G_BIT))
+if(count == 1 && properties[0].queueCount == 1 && BitEq(properties[0].queueFlags, T_BIT | C_BIT | G_BIT))
{
current.gfx_index = current.tfer_index = 0;
current.single_queue = true;
@@ -3044,13 +3044,13 @@ CheckQueueProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface
b32 surface_support;
vkGetPhysicalDeviceSurfaceSupportKHR(device, cast(u32)i, surface, &surface_support);
-if (current.gfx_index < 0 && surface_support && BitEq(prop.queueFlags, G_BIT))
+if(current.gfx_index < 0 && surface_support && BitEq(prop.queueFlags, G_BIT))
{
current.gfx_index = cast(i32)i;
continue;
}
-if (BitEq(prop.queueFlags, T_BIT | S_BIT) && !BitEq(prop.queueFlags, G_BIT | C_BIT))
+if(BitEq(prop.queueFlags, T_BIT | S_BIT) && !BitEq(prop.queueFlags, G_BIT | C_BIT))
{
sparse = true;
tfer_only = true;
@@ -3058,27 +3058,27 @@ CheckQueueProperties(Arena *arena, VkPhysicalDevice device, VkSurfaceKHR surface
continue;
}
-if (!(sparse && tfer_only) && BitEq(prop.queueFlags, T_BIT | S_BIT))
+if(!(sparse && tfer_only) && BitEq(prop.queueFlags, T_BIT | S_BIT))
{
sparse = true;
current.tfer_index = cast(i32)i;
continue;
}
-if (!sparse && !BitEq(prop.queueFlags, T_BIT) && BitEq(prop.queueFlags, C_BIT))
+if(!sparse && !BitEq(prop.queueFlags, T_BIT) && BitEq(prop.queueFlags, C_BIT))
{
tfer_only = true;
current.tfer_index = cast(i32)i;
continue;
}
-if (!sparse && !tfer_only && BitEq(prop.queueFlags, C_BIT))
+if(!sparse && !tfer_only && BitEq(prop.queueFlags, C_BIT))
{
current.tfer_index = cast(i32)i;
}
}
-if (current.tfer_index < 0)
+if(current.tfer_index < 0)
{
current.tfer_index = current.gfx_index;
}
@@ -3109,7 +3109,7 @@ DestroyRenderer(Vulkan* vk)
void
Destroy(VkInstance instance)
{
-if (instance)
+if(instance)
{
vkDestroyInstance(instance, null);
}
@@ -3120,7 +3120,7 @@ Destroy(VkDebugUtilsMessengerEXT dbg, VkInstance instance)
{
debug
{
-if (dbg)
+if(dbg)
{
vkDestroyDebugUtilsMessengerEXT(instance, dbg, null);
}
@@ -3130,7 +3130,7 @@ Destroy(VkDebugUtilsMessengerEXT dbg, VkInstance instance)
void
Destroy(VkSurfaceKHR surface, VkInstance instance)
{
-if (surface)
+if(surface)
{
vkDestroySurfaceKHR(instance, surface, null);
}
@@ -3139,7 +3139,7 @@ Destroy(VkSurfaceKHR surface, VkInstance instance)
void
Destroy(VkDevice device)
{
-if (device)
+if(device)
{
vkDestroyDevice(device, null);
}
@@ -3148,7 +3148,7 @@ Destroy(VkDevice device)
void
Destroy(VmaAllocator vma)
{
-if (vma)
+if(vma)
{
vmaDestroyAllocator(vma);
}
@@ -3159,13 +3159,13 @@ Destroy(VkSwapchainKHR swapchain, ImageView[] views, VkDevice device)
{
foreach(view; views)
{
-if (view.view)
+if(view.view)
{
vkDestroyImageView(device, view.view, null);
}
}
-if (swapchain)
+if(swapchain)
{
vkDestroySwapchainKHR(device, swapchain, null);
}
@@ -3174,12 +3174,12 @@ Destroy(VkSwapchainKHR swapchain, ImageView[] views, VkDevice device)
void
Destroy(Vulkan* vk, ImageView* view)
{
-if (view.view)
+if(view.view)
{
vkDestroyImageView(vk.device, view.view, null);
}
-if (view.image)
+if(view.image)
{
vmaDestroyImage(vk.vma, view.image, view.alloc);
}
@@ -3188,25 +3188,25 @@ Destroy(Vulkan* vk, ImageView* view)
void
Destroy(VkDescriptorPool pool, VkDescriptorSetLayout[] layouts, VkPipelineLayout pipeline_layout, VkSampler sampler, VkDevice device)
{
-if (sampler)
+if(sampler)
{
vkDestroySampler(device, sampler, null);
}
-if (pipeline_layout)
+if(pipeline_layout)
{
vkDestroyPipelineLayout(device, pipeline_layout, null);
}
foreach(layout; layouts)
{
-if (layout)
+if(layout)
{
vkDestroyDescriptorSetLayout(device, layout, null);
}
}
-if (pool)
+if(pool)
{
vkDestroyDescriptorPool(device, pool, null);
}
@@ -3215,39 +3215,39 @@ Destroy(VkDescriptorPool pool, VkDescriptorSetLayout[] layouts, VkPipelineLayout
void
DestroyFS(Vulkan* vk)
{
-if (vk.imm_fence)
+if(vk.imm_fence)
{
vkDestroyFence(vk.device, vk.imm_fence, null);
}
-if (vk.imm_cmd)
+if(vk.imm_cmd)
{
vkFreeCommandBuffers(vk.device, vk.imm_pool, 1, &vk.imm_cmd);
}
-if (vk.imm_pool)
+if(vk.imm_pool)
{
vkDestroyCommandPool(vk.device, vk.imm_pool, null);
}
-if (vk.comp_cmd)
+if(vk.comp_cmd)
{
vkFreeCommandBuffers(vk.device, vk.comp_cmd_pool, 1, &vk.comp_cmd);
}
-if (vk.comp_cmd_pool)
+if(vk.comp_cmd_pool)
{
vkDestroyCommandPool(vk.device, vk.comp_cmd_pool, null);
}
-if (vk.comp_fence)
+if(vk.comp_fence)
{
vkDestroyFence(vk.device, vk.comp_fence, null);
}
foreach(sem; vk.submit_sems)
{
-if (sem)
+if(sem)
{
vkDestroySemaphore(vk.device, sem, null);
}
@@ -3255,22 +3255,22 @@ DestroyFS(Vulkan* vk)
foreach(i; 0 .. FRAME_OVERLAP)
{
-if (vk.render_fences[i])
+if(vk.render_fences[i])
{
vkDestroyFence(vk.device, vk.render_fences[i], null);
}
-if (vk.cmd_pools[i])
+if(vk.cmd_pools[i])
{
vkFreeCommandBuffers(vk.device, vk.cmd_pools[i], 1, &vk.cmds[i]);
}
-if (vk.cmd_pools[i])
+if(vk.cmd_pools[i])
{
vkDestroyCommandPool(vk.device, vk.cmd_pools[i], null);
}
-if (vk.acquire_sems[i])
+if(vk.acquire_sems[i])
{
vkDestroySemaphore(vk.device, vk.acquire_sems[i], null);
}
@@ -3292,7 +3292,7 @@ InitInstance(Vulkan* vk)
foreach(i, layer; layers)
{
-if (strcmp(cast(char*)&layer.layerName, "VK_LAYER_KHRONOS_validation") == 0)
+if(strcmp(cast(char*)&layer.layerName, "VK_LAYER_KHRONOS_validation") == 0)
{
g_VLAYER_SUPPORT = true;
break;
@@ -3330,7 +3330,7 @@ InitInstance(Vulkan* vk)
debug
{
-if (g_VLAYER_SUPPORT && g_DEBUG_PRINTF)
+if(g_VLAYER_SUPPORT && g_DEBUG_PRINTF)
{
instance_info.pNext = &validation_features;
}
@@ -3381,11 +3381,11 @@ EnableVLayers(Vulkan* vk)
{
Push(vk, SI.Debug);
-if (g_VLAYER_SUPPORT)
+if(g_VLAYER_SUPPORT)
{
VkDebugUtilsMessageSeverityFlagBitsEXT severity_flags = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
-if (g_DEBUG_PRINTF)
+if(g_DEBUG_PRINTF)
{
severity_flags |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
}
@@ -3399,13 +3399,13 @@ EnableVLayers(Vulkan* vk)
pfnUserCallback: cast(PFN_vkDebugUtilsMessengerCallbackEXT)&DebugCallback,
};
-if (g_DEBUG_PRINTF)
+if(g_DEBUG_PRINTF)
{
info.messageSeverity |= VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
}
VkResult result = vkCreateDebugUtilsMessengerEXT(vk.instance, &info, null, &vk.dbg_msg);
-if (result != VK_SUCCESS)
+if(result != VK_SUCCESS)
{
Logf("EnableVLayers failed to initialize, will continue without validation: %s", VkResultStr(result));
}
@@ -3428,7 +3428,7 @@ PrintShaderDisassembly(Vulkan* vk, Pipeline pipeline_id, VkShaderStageFlagBits s
{
u64 size;
VkResult result = vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, null);
-if (result == VK_SUCCESS)
+if(result == VK_SUCCESS)
{
u8[] buf = AllocArray!(u8)(&vk.frame_arenas[vk.frame_index], size);
vkGetShaderInfoAMD(vk.device, pipeline.handle, stage, VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD, &size, buf.ptr);


@@ -133,14 +133,14 @@ LoadGlobalFunctions()
foreach(name; VULKAN_LIBS)
{
lib = LoadLibrary(name);
-if (lib.ptr)
+if(lib.ptr)
{
fn = LoadFunction(lib, "vkGetInstanceProcAddr");
vkGetInstanceProcAddr = cast(PFN_vkGetInstanceProcAddr)fn.ptr;
}
}
-if (fn.ptr)
+if(fn.ptr)
{
vkGetInstanceProcAddr = cast(PFN_vkGetInstanceProcAddr)vkGetInstanceProcAddr(null, "vkGetInstanceProcAddr");
assert(vkGetInstanceProcAddr != null, "LoadGlobalFunctions failure: Unable to load vkGetInstanceProcAddr");


@@ -15,11 +15,11 @@ DebugCallback(
{
string ms, mt;
-if (callback_data.pMessage != null)
+if(callback_data.pMessage != null)
{
const(char)[] msg = callback_data.pMessage[0 .. strlen(callback_data.pMessage)];
bool debug_printf = false;
-if (callback_data.pMessageIdName != null)
+if(callback_data.pMessageIdName != null)
{
debug_printf = (strcmp(callback_data.pMessageIdName, "WARNING-DEBUG-PRINTF") == 0);
}
@@ -69,7 +69,7 @@ DebugCallback(
break;
}
-if (debug_printf)
+if(debug_printf)
{
writefln("SHADER PRINT: %s", msg);
}


@@ -138,7 +138,7 @@ struct SLList(T)
pragma(inline): void
Push(T)(SLList!(T)*list, Node!(T)* node, Node!(T)* nil)
{
-if (CheckNil(nil, list.first))
+if(CheckNil(nil, list.first))
{
list.first = list.last = node;
node.next = nil;
@@ -154,7 +154,7 @@ Push(T)(SLList!(T)*list, Node!(T)* node, Node!(T)* nil)
pragma(inline): void
PushFront(T)(SLList!(T)*list, Node!(T)* node, Node!(T)* nil)
{
-if (CheckNil(nil, list.first))
+if(CheckNil(nil, list.first))
{
list.first = list.last = node;
node.next = nil;
@@ -246,7 +246,7 @@ AllocAlign(Arena* arena, u64 size, u64 alignment)
uintptr current = mem_pos + arena.pos;
uintptr offset = AlignPow2(current, alignment) - mem_pos;
-if (offset+size <= arena.length)
+if(offset+size <= arena.length)
{
ptr = &arena.mem[offset];
arena.pos = offset+size;