remove bindless descriptors, delete unused shaders

matthew 2025-08-09 16:25:18 +10:00
parent 92980fba0d
commit f4a8cef602
10 changed files with 177 additions and 668 deletions

Binary file not shown.

View File

@@ -14,10 +14,16 @@ f32 g_DELTA;
 struct UIVertex
 {
-    Vec2 p0;
-    Vec2 p1;
+    Vec2 dst_start;
+    Vec2 dst_end;
+    Vec2 src_start;
+    Vec2 src_end;
     Vec4 col;
-    u32 texture;
+}
+
+struct UIPushConst
+{
+    Vec2 res;
 }
 
 struct Vertex
@@ -62,6 +68,9 @@ struct Game
     PlatformWindow* window;
 
     Pipeline ui_pipeline;
+    DescSetLayout ui_desc_layout;
+    DescSet ui_desc_set;
+    PipelineLayout ui_layout;
     UIVertex[] ui_vertex_buf;
     u32[] ui_index_buf;
@@ -70,7 +79,6 @@ struct Game
     ImageView draw_image, depth_image;
 
     GlobalUniforms globals;
-    PushConst pc;
     Timer timer;
 }
@@ -93,27 +101,31 @@ InitGame(PlatformWindow* window)
     UVec2 ext = GetExtent(&g.rd);
 
-    CreateImageView(&g.rd, &g.aux_image, ext.x, ext.y, FMT.R_U32, IU.Storage);
+    DescLayoutBinding[] layout_bindings = [
+        { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
+    ];
+    g.ui_desc_layout = CreateDescSetLayout(&g.rd, layout_bindings);
+    g.ui_desc_set = AllocDescSet(&g.rd, g.ui_desc_layout);
+    g.ui_layout = CreatePipelineLayout(&g.rd, g.ui_desc_layout, UIPushConst.sizeof);
 
     GfxPipelineInfo ui_info = {
         vertex_shader: "shaders/gui.vert.spv",
         frag_shader: "shaders/gui.frag.spv",
         input_rate: IR.Instance,
         input_rate_stride: UIVertex.sizeof,
+        layout: g.ui_layout,
         vertex_attributes: [
-            { binding: 0, location: 0, format: FMT.RG_F32, offset: 0 },
-            { binding: 0, location: 1, format: FMT.RG_F32, offset: UIVertex.p1.offsetof },
-            { binding: 0, location: 2, format: FMT.RGBA_F32, offset: UIVertex.col.offsetof },
-            { binding: 0, location: 3, format: FMT.UINT, offset: UIVertex.texture.offsetof },
+            { binding: 0, location: 0, format: FMT.RG_F32, offset: UIVertex.dst_start.offsetof },
+            { binding: 0, location: 1, format: FMT.RG_F32, offset: UIVertex.dst_end.offsetof },
+            { binding: 0, location: 2, format: FMT.RG_F32, offset: UIVertex.src_start.offsetof },
+            { binding: 0, location: 3, format: FMT.RG_F32, offset: UIVertex.src_end.offsetof },
+            { binding: 0, location: 4, format: FMT.RGBA_F32, offset: UIVertex.col.offsetof },
         ],
     };
     g.ui_pipeline = CreateGraphicsPipeline(&g.rd, &ui_info);
-    assert(g.aux_image.view != null);
-    UpdateAuxImage(&g.rd, &g.aux_image);
 
     Reset(&g.frame_arena);
     return g;
@@ -142,41 +154,23 @@ Cycle(Game* g)
     Reset(&g.frame_arena);
 
-    ProcessInputs(g, &g.camera);
-    ResizeDrawImageIfNeeded(&g.rd, &g.aux_image);
-    UpdateAuxImage(&g.rd, &g.aux_image);
+    //ProcessInputs(g, &g.camera);
 
     //Sort(g, g.camera.pos, &g.model);
     //Update(g, &g.camera);
-    Update(&g.camera);
+    //Update(&g.camera);
 
     BeginFrame(&g.rd);
-    PrepAuxImage(&g.rd, &g.aux_image);
-
-    Bind(&g.rd, &g.compute_pipeline);
-    ClearColor(&g.rd, &g.aux_image, Vec4(0.0));
-    ImageBarrier(&g.rd);
 
     UVec2 ext = GetExtent(&g.rd);
     f32 aspect = (cast(f32)ext.x) / (cast(f32)ext.y);
     Mat4 projection = Perspective(90.0, aspect, 10000.0, 0.1);
-    g.globals.projection_matrix = projection;
-    g.globals.view_matrix = ViewMatrix(&g.camera);
-    g.globals.res.x = ext.x;
-    g.globals.res.y = ext.y;
-    SetUniform(&g.rd, &g.globals);
 
     BeginRendering(&g.rd);
-    Bind(&g.rd, &g.ui_pipeline);
+    Bind(&g.rd, g.ui_pipeline, g.ui_desc_set);
     BindUIBuffers(&g.rd);
@@ -196,12 +190,8 @@ pragma(inline): void
 DrawModel(Game* g, Model* model)
 {
     BindBuffers(&g.rd, &model.index_buffer, &model.vertex_buffer);
-    g.pc.model_matrix = Mat4Identity();
-    Translate(&g.pc.model_matrix, model.pos);
 
     foreach(i, part; model.parts)
     {
-        g.pc.mat_id = part.mat;
-        PushConstants(&g.rd, &g.pc);
         DrawIndexed(&g.rd, part.length, 1, part.offset);
     }
 }
@@ -211,7 +201,7 @@ Destroy(Game* g)
 {
     WaitIdle(&g.rd);
-    Destroy(&g.rd, &g.ui_pipeline);
+    Destroy(&g.rd, g.ui_pipeline);
     Destroy(&g.rd);
 }
@@ -318,10 +308,10 @@ Sort(Game* g, Vec3 pos, Model* model)
 void
 DrawRect(Game* g, f32 p0_x, f32 p0_y, f32 p1_x, f32 p1_y, Vec4 col)
 {
-    g.ui_vertex_buf[g.ui_count].p0.x = p0_x;
-    g.ui_vertex_buf[g.ui_count].p0.y = p0_y;
-    g.ui_vertex_buf[g.ui_count].p1.x = p1_x;
-    g.ui_vertex_buf[g.ui_count].p1.y = p1_y;
+    g.ui_vertex_buf[g.ui_count].dst_start.x = p0_x;
+    g.ui_vertex_buf[g.ui_count].dst_start.y = p0_y;
+    g.ui_vertex_buf[g.ui_count].dst_end.x = p1_x;
+    g.ui_vertex_buf[g.ui_count].dst_end.y = p1_y;
     g.ui_vertex_buf[g.ui_count].col = col;
 
     u32 index_count = g.ui_count * 6;
@@ -336,6 +326,7 @@ DrawRect(Game* g, f32 p0_x, f32 p0_y, f32 p1_x, f32 p1_y, Vec4 col)
     g.ui_count += 1;
 }
 
+// TODO: integrate this with vulkan again
 Model
 LoadModel(Game* g, string name)
 {
@@ -359,7 +350,7 @@ LoadModel(Game* g, string name)
         const(char)[] tex_name = m3d.texture[i].name[0 .. strlen(m3d.texture[i].name)];
         CreateImageView(&g.rd, &model.textures[i], w, h, ch, tex_data);
-        tex_lookup[i] = Pop(&g.rd, DT.SampledImage);
+        //tex_lookup[i] = Pop(&g.rd, DT.SampledImage);
     }
 
     Material[] mats = AllocArray!(Material)(&g.frame_arena, m3d.nummaterial);
@@ -405,7 +396,7 @@ LoadModel(Game* g, string name)
         CreateBuffer(&g.rd, &model.materials[i], BT.Uniform, Material.sizeof, false);
         assert(Transfer(&g.rd, &model.materials[i], &mats[i]), "LoadModel failure: Transfer error when transferring material");
-        mat_lookup[i] = Pop(&g.rd, DT.Material);
+        //mat_lookup[i] = Pop(&g.rd, DT.Material);
     }
 
     u32 mesh_count = 0;
@@ -536,8 +527,8 @@ LoadModel(Game* g, string name)
     assert(Transfer(&g.rd, &model.vertex_buffer, vertices), "LoadModel failure: Unable to transfer vertex buffer");
     assert(Transfer(&g.rd, &model.index_buffer, indices), "LoadModel failure: Unable to transfer index buffer");
-    WriteDescriptors(&g.rd, DT.Material, model.materials, mat_lookup);
-    WriteDescriptors(&g.rd, DT.SampledImage, model.textures, tex_lookup);
+    //WriteDescriptors(&g.rd, DT.Material, model.materials, mat_lookup);
+    //WriteDescriptors(&g.rd, DT.SampledImage, model.textures, tex_lookup);
 
     model.positions = positions;
     model.indices = indices;

View File

@@ -24,12 +24,7 @@ alias RenderPass = VkRenderPass;
 alias DescSet = VkDescriptorSet;
 alias DescSetLayout = VkDescriptorSetLayout;
 alias PipelineLayout = VkPipelineLayout;
-
-struct DescLayoutBinding
-{
-    VkDescriptorSetLayoutBinding binding;
-    bool dynamic;
-}
+alias DescLayoutBinding = VkDescriptorSetLayoutBinding;
 
 bool g_VLAYER_SUPPORT = false;
 bool g_DEBUG_PRINTF = false;
@@ -233,6 +228,7 @@ struct GfxPipelineInfo
     Specialization vert_spec;
     Specialization frag_spec;
     bool self_dependency;
+    PipelineLayout layout;
 }
 
 struct CompPipelineInfo
@@ -242,12 +238,6 @@ struct CompPipelineInfo
     PipelineLayout layout;
 }
 
-struct PushConst
-{
-    Mat4 model_matrix;
-    u32 mat_id;
-}
-
 enum StepInitialized : u32
 {
     Renderer = 1,
@@ -268,9 +258,7 @@ alias SI = StepInitialized;
 enum DescType : u32
 {
-    Shared = 0,
-    SampledImage,
-    Material,
     Max,
 }
@@ -348,7 +336,9 @@ struct Vulkan
     PipelineHandles[] pipeline_handles;
     u32 pipeline_count;
-    Pipeline[FRAME_OVERLAP] last_pipeline;
+    VkPipeline[FRAME_OVERLAP] last_pipeline;
+    DescSetLayout global_set_layout;
+    DescSet global_set;
 
     MappedBuffer!(u8) transfer_buf;
     MappedBuffer!(UIVertex) ui_vert_buf;
@@ -385,6 +375,7 @@ struct PipelineHandles
     VkPipeline handle;
     VkPipelineBindPoint type;
     VkPipelineLayout layout;
+    Pipeline index;
 }
 
 struct QueueInfo
@@ -426,10 +417,11 @@ Init(PlatformWindow* window, u64 permanent_mem, u64 frame_mem)
     if (success) success = CreateSwapchain(&vk);
     if (success) success = CreateDrawImages(&vk);
     if (success) success = InitFrameStructures(&vk);
-    if (success) success = InitDescriptors(&vk);
+    if (success) InitDescriptors(&vk);
+    if (success) success = InitGlobalDescSet(&vk);
     if (success) InitPipelines(&vk);
     if (success) InitBuffers(&vk);
-    if (success) success = InitConversionPipeline(&vk);
+    if (success) InitConversionPipeline(&vk);
     if (success) InitFramebufferAndRenderPass(&vk);
 
     assert(success, "Error initializing vulkan");
@@ -476,7 +468,7 @@ AllocDescSet(Vulkan* vk, DescSetLayout layout)
     {
         PushDescriptorPool(vk);
-        alloc_info = vk.active_pool;
+        alloc_info.descriptorPool = vk.active_pool;
         result = vkAllocateDescriptorSets(vk.device, &alloc_info, &set);
         VkCheckA("vkAllocateDescriptorSets failure", result); // TODO: look more into how to handle this
@@ -486,10 +478,29 @@ AllocDescSet(Vulkan* vk, DescSetLayout layout)
 }
 
 PipelineLayout
-CreatePipelineLayout(Vulkan* vk, DescSetLayout[] layouts, u64 push_const_size, bool compute = false)
+CreatePipelineLayout(T)(Vulkan* vk, T layouts, u32 push_const_size, bool compute = false) if (is(T: DescSetLayout) || is(T: DescSetLayout[]))
 {
     VkShaderStageFlagBits stage = (compute ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT);
+
+    DescSetLayout[] desc_layouts;
+    static if (is(T: DescSetLayout))
+    {
+        desc_layouts = AllocArray!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], 2);
+        desc_layouts[0] = vk.global_set_layout;
+        desc_layouts[1] = layouts;
+    }
+    else static if (is(T: DescSetLayout[]))
+    {
+        desc_layouts = AllocArray!(DescSetLayout)(&vk.frame_arenas[vk.frame_index], layouts.length + 1);
+        desc_layouts[0] = vk.global_set_layout;
+        foreach(i; 1 .. layouts.length)
+        {
+            desc_layouts[i] = layouts[i-1];
+        }
+    }
 
     VkPushConstantRange const_range = {
         offset: 0,
         size: cast(VkDeviceSize)push_const_size,
@@ -498,8 +509,8 @@ CreatePipelineLayout(Vulkan* vk, DescSetLayout[] layouts, u64 push_const_size, b
     VkPipelineLayoutCreateInfo layout_info = {
         sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
-        setLayoutCount: cast(u32)layouts.length,
-        pSetLayouts: layouts.ptr,
+        setLayoutCount: cast(u32)desc_layouts.length,
+        pSetLayouts: desc_layouts.ptr,
         pushConstantRangeCount: (push_const_size > 0 ? 1 : 0),
         pPushConstantRanges: (push_const_size > 0 ? &const_range : null),
     };
@@ -511,19 +522,19 @@ CreatePipelineLayout(Vulkan* vk, DescSetLayout[] layouts, u64 push_const_size, b
     return layout;
 }
 
-bool
+void
 InitConversionPipeline(Vulkan* vk)
 {
     Push(vk, SI.Pipelines);
 
     VkDescriptorSetLayoutBinding[] layout_bindings = [
         { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
-        { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
+        { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_COMPUTE_BIT },
     ];
 
     vk.conv_desc_layout = CreateDescSetLayout(vk, layout_bindings);
     vk.conv_desc_set = AllocDescSet(vk, vk.conv_desc_layout);
-    vk.conv_pipeline_layout = CreatePipelineLayout(vk, [vk.conv_desc_layout], ConvPushConst.sizeof, true);
+    vk.conv_pipeline_layout = CreatePipelineLayout(vk, vk.conv_desc_layout, ConvPushConst.sizeof, true);
 
     u32 channels = 1;
     CompPipelineInfo conv_info = {
@@ -549,8 +560,6 @@ InitConversionPipeline(Vulkan* vk)
     channels = 3;
     vk.rgb_to_rgba_pipeline = CreateComputePipeline(vk, &conv_info);
-
-    return success;
 }
 
 void
@@ -1133,14 +1142,14 @@ CreateImageView(Vulkan* vk, ImageView* view, u32 w, u32 h, Format format, ImageU
 }
 
 void
-PushConstants(Vulkan* vk, PipelineLayout layout, PushConst* pc)
+PushConstants(T)(Vulkan* vk, PipelineLayout layout, T* pc)
 {
     vkCmdPushConstants(
         vk.cmds[vk.frame_index],
         layout,
         VK_SHADER_STAGE_VERTEX_BIT|VK_SHADER_STAGE_FRAGMENT_BIT|VK_SHADER_STAGE_COMPUTE_BIT,
         0,
-        PushConst.sizeof,
+        T.sizeof,
        pc
    );
 }
@@ -1364,6 +1373,17 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets)
         pipeline.type,
         pipeline.layout,
         0,
+        1,
+        &vk.global_set,
+        0,
+        null
+    );
+    vkCmdBindDescriptorSets(
+        vk.cmds[vk.frame_index],
+        pipeline.type,
+        pipeline.layout,
+        1,
         cast(u32)sets.length,
         sets.ptr,
         0,
@@ -1371,6 +1391,16 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet[] sets)
     );
 }
 
+PipelineHandles*
+NewPipeline(Vulkan* vk)
+{
+    PipelineHandles* pipeline = vk.pipeline_handles.ptr + vk.pipeline_count;
+    pipeline.index = vk.pipeline_count;
+    vk.pipeline_count += 1;
+    return pipeline;
+}
+
 void
 Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set)
 {
@@ -1384,6 +1414,17 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set)
         pipeline.layout,
         0,
         1,
+        &vk.global_set,
+        0,
+        null
+    );
+    vkCmdBindDescriptorSets(
+        vk.cmds[vk.frame_index],
+        pipeline.type,
+        pipeline.layout,
+        1,
+        1,
         &set,
         0,
         null
@@ -1393,10 +1434,15 @@ Bind(Vulkan* vk, Pipeline pipeline_handle, DescSet set)
 pragma(inline): void
 BindPipeline(Vulkan* vk, PipelineHandles* pipeline)
 {
-    if (vk.last_pipeline[vk.frame_index] == 0 || vk.last_pipeline[vk.frame_index] != pipeline_handle)
+    if
+    (
+        vk.last_pipeline[vk.frame_index] == null ||
+        vk.last_pipeline[vk.frame_index] == VK_NULL_HANDLE ||
+        vk.last_pipeline[vk.frame_index] != pipeline.handle
+    )
     {
         vkCmdBindPipeline(vk.cmds[vk.frame_index], pipeline.type, pipeline.handle);
-        vk.last_pipeline[vk.frame_index] = pipeline_handle;
+        vk.last_pipeline[vk.frame_index] = pipeline.handle;
 
         VkViewport viewport = {
             x: 0.0,
@@ -1562,7 +1608,9 @@ InitFramebufferAndRenderPass(Vulkan* vk)
 Pipeline
 CreateGraphicsPipeline(Vulkan* vk, GfxPipelineInfo* build_info)
 {
-    PipelineHandle pipeline = { type: VK_PIPELINE_BIND_POINT_GRAPHICS };
+    PipelineHandles* pipeline = NewPipeline(vk);
+    pipeline.type = VK_PIPELINE_BIND_POINT_GRAPHICS;
+    pipeline.layout = build_info.layout;
 
     VkDynamicState[] dyn_state = [ VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR ];
@@ -1723,12 +1771,13 @@ CreateGraphicsPipeline(Vulkan* vk, GfxPipelineInfo* build_info)
         stageCount: cast(u32)shader_info.length,
         pStages: shader_info.ptr,
         renderPass: vk.render_pass,
+        layout: build_info.layout,
     };
 
     VkResult result = vkCreateGraphicsPipelines(vk.device, null, 1, &create_info, null, &pipeline.handle);
     assert(VkCheck("CreateGraphicsPipeline failure", result), "Unable to build pipeline");
 
-    return pipeline;
+    return pipeline.index;
 }
 
 Pipeline
@@ -1765,12 +1814,11 @@ CreateComputePipeline(Vulkan* vk, CompPipelineInfo* comp_info)
         info.stage.pSpecializationInfo = &spec_info;
     }
 
-    PipelineHandles* pipeline = vk.pipeline_handles.ptr + vk.pipeline_count;
+    PipelineHandles* pipeline = NewPipeline(vk);
     pipeline.type = VK_PIPELINE_BIND_POINT_COMPUTE;
     pipeline.layout = comp_info.layout;
-    Pipeline pipeline_handle = vk.pipeline_count;
-    vk.pipeline_count += 1;
+    Pipeline pipeline_handle = pipeline.index;
 
     VkResult result = vkCreateComputePipelines(vk.device, null, 1, &info, null, &pipeline.handle);
     assert(VkCheck("CreateComputePipeline failure", result), "Unable to build pipeline");
@@ -1797,28 +1845,6 @@ ClearColor(Vulkan* vk, ImageView* view, Vec4 color)
     );
 }
 
-void
-SetUniform(Vulkan* vk, GlobalUniforms* globals)
-{
-    vk.global_buf.data[0] = *globals;
-    VkDescriptorBufferInfo buffer_info = {
-        buffer: vk.global_buf.buffer,
-        range: GlobalUniforms.sizeof,
-    };
-
-    VkWriteDescriptorSet write = {
-        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-        dstSet: vk.desc_sets[DT.Shared],
-        dstBinding: 0,
-        descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
-        descriptorCount: 1,
-        pBufferInfo: &buffer_info,
-    };
-
-    vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
-}
-
 void
 WaitIdle(Vulkan* vk)
 {
@@ -1832,9 +1858,9 @@ Destroy(Vulkan* vk, Shader shader)
 }
 
 void
-Destroy(Vulkan* vk, Pipeline* pipeline)
+Destroy(Vulkan* vk, Pipeline pipeline)
 {
-    vkDestroyPipeline(vk.device, pipeline.handle, null);
+    vkDestroyPipeline(vk.device, vk.pipeline_handles[pipeline].handle, null);
 }
 
 void
@@ -1927,19 +1953,19 @@ DestroyPipelines(Vulkan* vk)
         vkDestroyDescriptorSetLayout(vk.device, vk.conv_desc_layout, null);
     }
 
-    if (vk.r_to_rgba_pipeline.handle)
+    if (vk.r_to_rgba_pipeline)
     {
-        vkDestroyPipeline(vk.device, vk.r_to_rgba_pipeline.handle, null);
+        vkDestroyPipeline(vk.device, vk.pipeline_handles[vk.r_to_rgba_pipeline].handle, null);
     }
 
-    if (vk.rg_to_rgba_pipeline.handle)
+    if (vk.rg_to_rgba_pipeline)
    {
-        vkDestroyPipeline(vk.device, vk.rg_to_rgba_pipeline.handle, null);
+        vkDestroyPipeline(vk.device, vk.pipeline_handles[vk.rg_to_rgba_pipeline].handle, null);
    }
 
-    if (vk.rgb_to_rgba_pipeline.handle)
+    if (vk.rgb_to_rgba_pipeline)
     {
-        vkDestroyPipeline(vk.device, vk.rgb_to_rgba_pipeline.handle, null);
+        vkDestroyPipeline(vk.device, vk.pipeline_handles[vk.rgb_to_rgba_pipeline].handle, null);
     }
 }
@@ -1956,13 +1982,11 @@ Destroy(Vulkan* vk, Buffer* buf)
     vmaDestroyBuffer(vk.vma, buf.buffer, buf.alloc);
 }
 
-bool
+void
 InitDescriptors(Vulkan* vk)
 {
     Push(vk, SI.DescriptorPools);
-    bool success = true;
 
     PushDescriptorPool(vk);
 }
@@ -1987,135 +2011,22 @@ PushDescriptorPool(Vulkan* vk)
     VkDescriptorPool pool;
     VkResult result = vkCreateDescriptorPool(vk.device, &pool_info, null, &pool);
-    success = VkCheck("vkCreateDescriptorPool failure", result);
+    bool success = VkCheck("vkCreateDescriptorPool failure", result);
     assert(success, "vkCreateDescriptorPool error");
 
     if (vk.active_pool == null || vk.active_pool == VK_NULL_HANDLE)
     {
         Node!(VkDescriptorPool)* node = Alloc!(Node!(VkDescriptorPool));
         node.value = vk.active_pool;
-        PushFront(&g.full_pools, node, null);
+        PushFront(&vk.full_pools, node, null);
     }
 
     vk.active_pool = pool;
 }
 
 bool
-InitDescriptors(Vulkan* vk)
+InitGlobalDescSet(Vulkan* vk)
 {
-    if (success)
-    {
-        VkDescriptorSetLayoutBinding[] shared_bindings = [
-            { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-            { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-            { binding: 2, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-            { binding: 3, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-            { binding: 4, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-            { binding: 5, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-            { binding: 6, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
-        ];
-
-        VkDescriptorBindingFlags[] shared_binding_flags = AllocArray!(VkDescriptorBindingFlags)(&vk.frame_arenas[0], shared_bindings.length);
-        shared_binding_flags[] = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT;
-
-        VkDescriptorSetLayoutBindingFlagsCreateInfo shared_flag_info = {
-            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
-            bindingCount: cast(u32)shared_binding_flags.length,
-            pBindingFlags: shared_binding_flags.ptr,
-        };
-
-        VkDescriptorSetLayoutCreateInfo shared_set_info = {
-            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
-            pNext: &shared_flag_info,
-            bindingCount: cast(u32)shared_bindings.length,
-            pBindings: shared_bindings.ptr,
-            flags: VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
-        };
-
-        result = vkCreateDescriptorSetLayout(vk.device, &shared_set_info, null, &vk.desc_layouts[DT.Shared]);
-        success = VkCheck("vkCreateDescriptorSetLayout failure", result);
-    }
-
-    if (success)
-    {
-        VkDescriptorType[DT.max] type_lookup = [
-            DT.SampledImage: VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
-            DT.Material: VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
-        ];
-
-        VkDescriptorBindingFlags bindless_flag = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT | VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
-        VkDescriptorSetLayoutBindingFlagsCreateInfo bindless_flag_info = {
-            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
-            bindingCount: 1,
-            pBindingFlags: &bindless_flag,
-        };
-
-        VkDescriptorSetLayoutBinding binding = {
-            binding: 0,
-            descriptorCount: 1024,
-            stageFlags: VK_SHADER_STAGE_ALL,
-        };
-
-        VkDescriptorSetLayoutCreateInfo bindless_set_info = {
-            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
-            pNext: &bindless_flag_info,
-            bindingCount: 1,
-            pBindings: &binding,
-            flags: VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
-        };
-
-        foreach(i; cast(u64)(DT.Shared+1) .. DT.max)
-        {
-            binding.descriptorType = type_lookup[i];
-            if (success)
-            {
-                result = vkCreateDescriptorSetLayout(vk.device, &bindless_set_info, null, vk.desc_layouts.ptr + i);
-                success = VkCheck("vkCreateDescriptorSetLayout failure", result);
-            }
-        }
-    }
-
-    if (success)
-    {
-        VkDescriptorSetAllocateInfo alloc_info = {
-            sType: VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
-            descriptorSetCount: cast(u32)DT.max,
-            pSetLayouts: vk.desc_layouts.ptr,
-            descriptorPool: vk.desc_pool,
-        };
-
-        result = vkAllocateDescriptorSets(vk.device, &alloc_info, vk.desc_sets.ptr);
-        success = VkCheck("vkAllocateDescriptorSets failure", result);
-    }
-
-    if (success)
-    {
-        VkPushConstantRange const_range = {
-            offset: 0,
-            size: cast(VkDeviceSize)PushConst.sizeof,
-            stageFlags: VK_SHADER_STAGE_COMPUTE_BIT | VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_VERTEX_BIT,
-        };
-
-        VkPipelineLayoutCreateInfo layout_info = {
-            sType: VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
-            setLayoutCount: cast(u32)DT.max,
-            pushConstantRangeCount: 1,
-            pPushConstantRanges: &const_range,
-            pSetLayouts: vk.desc_layouts.ptr,
-        };
-
-        result = vkCreatePipelineLayout(vk.device, &layout_info, null, &vk.pipeline_layout);
-        success = VkCheck("vkCreatePipelineLayout failure", result);
-    }
-
     if (success)
     {
         VkPhysicalDeviceProperties props;
         vkGetPhysicalDeviceProperties(vk.physical_device, &props);
@@ -2133,116 +2044,39 @@ InitDescriptors(Vulkan* vk)
         mipmapMode: VK_SAMPLER_MIPMAP_MODE_LINEAR,
     };
 
-    result = vkCreateSampler(vk.device, &sampler_info, null, &vk.nearest_sampler);
-    success = VkCheck("vkCreateSampler failure", result);
-    }
-
-    if (success)
-    {
-        VkSamplerCreateInfo sampler_info = {
-            sType: VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
-            magFilter: VK_FILTER_LINEAR,
-            minFilter: VK_FILTER_LINEAR,
-            addressModeU: VK_SAMPLER_ADDRESS_MODE_REPEAT,
-            addressModeV: VK_SAMPLER_ADDRESS_MODE_REPEAT,
-            addressModeW: VK_SAMPLER_ADDRESS_MODE_REPEAT,
-            anisotropyEnable: VK_FALSE,
-            compareEnable: VK_FALSE,
-            borderColor: VK_BORDER_COLOR_INT_OPAQUE_BLACK,
-            mipmapMode: VK_SAMPLER_MIPMAP_MODE_NEAREST,
-            unnormalizedCoordinates: VK_FALSE,
-        };
-
-        result = vkCreateSampler(vk.device, &sampler_info, null, &vk.oit_sampler);
-        success = VkCheck("vkCreateSampler failure", result);
-    }
-
-    if (success)
-    {
-        // TODO: move this elsewhere later
-        // Also add samples/supersampling scaling
-        CreateBufferView(vk, &vk.a_buffer_view, (u32.sizeof * 2) * vk.swapchain_extent.width * vk.swapchain_extent.height, FMT.RG_U32);
-
-        VkDescriptorBufferInfo a_buffer_info = {
-            buffer: vk.a_buffer_view.buffer,
-            offset: 0,
-            range: VK_WHOLE_SIZE,
-        };
-        VkDescriptorImageInfo aux_info = {
-            sampler: vk.oit_sampler,
-            imageView: vk.aux_image.view,
-            imageLayout: VK_IMAGE_LAYOUT_GENERAL,
-        };
-        VkDescriptorImageInfo sampler_info = {
-            sampler: vk.nearest_sampler,
-        };
-        VkDescriptorImageInfo oit_sample_info = {
-            sampler: vk.oit_sampler,
-        };
-
-        VkWriteDescriptorSet[] writes = [
-            {
-                sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-                dstSet: vk.desc_sets[DT.Shared],
-                dstBinding: 3,
-                descriptorCount: 1,
-                descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER,
-                pImageInfo: &sampler_info,
-            },
-            {
-                sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-                dstSet: vk.desc_sets[DT.Shared],
-                dstBinding: 4,
-                descriptorCount: 1,
-                descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
-                pBufferInfo: &a_buffer_info,
-                pTexelBufferView: &vk.a_buffer_view.view,
-            },
-            {
-                sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-                dstSet: vk.desc_sets[DT.Shared],
-                dstBinding: 6,
-                descriptorCount: 1,
-                descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER,
-                pImageInfo: &oit_sample_info,
-            },
-        ];
-
-        vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null);
-        WriteDrawImageDesc(vk);
-    }
-
-    return success;
-}
-
-// TODO: better descriptor updating
-void
-UpdateAuxImage(Vulkan* vk, ImageView* view)
-{
-    VkDescriptorImageInfo aux_info = {
-        sampler: vk.oit_sampler,
-        imageView: view.view,
-        imageLayout: VK_IMAGE_LAYOUT_GENERAL,
-    };
-    assert(view.view != VK_NULL_HANDLE);
-    VkWriteDescriptorSet write = {
-        sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-        dstSet: vk.desc_sets[DT.Shared],
-        dstBinding: 5,
-        descriptorCount: 1,
-        descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
-        pImageInfo: &aux_info,
-    };
-    vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
-}
+    VkResult result = vkCreateSampler(vk.device, &sampler_info, null, &vk.nearest_sampler);
+    bool success = VkCheck("vkCreateSampler failure", result);
+
+    if (success)
+    {
+        DescLayoutBinding[] layout_bindings = [
+            { binding: 0, descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
+            { binding: 1, descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER, descriptorCount: 1, stageFlags: VK_SHADER_STAGE_ALL },
+        ];
+
+        vk.global_set_layout = CreateDescSetLayout(vk, layout_bindings);
+        vk.global_set = AllocDescSet(vk, vk.global_set_layout);
+
+        WriteDrawImageDesc(vk);
+
+        VkDescriptorImageInfo sampler_desc_info = {
+            sampler: vk.nearest_sampler,
+        };
+        VkWriteDescriptorSet write = {
+            sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+            dstSet: vk.global_set,
+            dstBinding: 1,
+            descriptorCount: 1,
+            descriptorType: VK_DESCRIPTOR_TYPE_SAMPLER,
+            pImageInfo: &sampler_desc_info,
+        };
+
+        vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
+    }
+
+    return success;
+}
 
 void
@@ -2255,8 +2089,8 @@ WriteDrawImageDesc(Vulkan* vk)
     VkWriteDescriptorSet write = {
         sType: VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
-        dstSet: vk.desc_sets[DT.Shared],
-        dstBinding: 2,
+        dstSet: vk.global_set,
+        dstBinding: 0,
         descriptorCount: 1,
         descriptorType: VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
         pImageInfo: &draw_image_info,
@@ -2305,56 +2139,6 @@ WriteConvDescriptor(Vulkan* vk, ImageView* view)
     vkUpdateDescriptorSets(vk.device, 1, &write, 0, null);
 }
 
-void
-WriteDescriptors(Vulkan* vk, DescType type, Buffer[] buffers, u32[] indices)
-{
-    assert(buffers.length > 0, "WriteDescriptors failure: Buffer length is 0");
-
-    VkDescriptorBufferInfo[] buffer_info = AllocArray!(VkDescriptorBufferInfo)(&vk.frame_arenas[vk.frame_index], buffers.length);
-    VkWriteDescriptorSet[] writes = AllocArray!(VkWriteDescriptorSet)(&vk.frame_arenas[vk.frame_index], buffers.length);
-
-    foreach(i, buf; buffers)
-    {
-        buffer_info[i].buffer = buf.buffer;
-        buffer_info[i].range = buf.size;
-
-        writes[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
-        writes[i].dstSet = vk.desc_sets[type];
-        writes[i].dstArrayElement = indices[i];
-        writes[i].dstBinding = 0;
-        writes[i].descriptorCount = 1;
-        writes[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
-        writes[i].pBufferInfo = buffer_info.ptr + i;
-    }
-
-    vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null);
-}
-
-void
-WriteDescriptors(Vulkan* vk, DescType type, ImageView[] views, u32[] indices)
-{
-    assert(views.length > 0, "WriteDescriptors failure: ImageView length is 0");
-
-    VkDescriptorImageInfo[] image_info = AllocArray!(VkDescriptorImageInfo)(&vk.frame_arenas[vk.frame_index], views.length);
-    VkWriteDescriptorSet[] writes = AllocArray!(VkWriteDescriptorSet)(&vk.frame_arenas[vk.frame_index], views.length);
-
-    foreach(i, view; views)
-    {
-        image_info[i].imageView = view.view;
-        image_info[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
-
-        writes[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
-        writes[i].dstSet = vk.desc_sets[type];
-        writes[i].dstArrayElement = indices[i];
-        writes[i].dstBinding = 0;
-        writes[i].descriptorCount = 1;
-        writes[i].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
-        writes[i].pImageInfo = image_info.ptr + i;
-    }
-
-    vkUpdateDescriptorSets(vk.device, cast(u32)writes.length, writes.ptr, 0, null);
-}
-
 pragma(inline): void
 Dispatch(Vulkan* vk, VkCommandBuffer cmd)
 {

View File

@@ -1,6 +1,9 @@
 #version 460
 #extension GL_EXT_shader_8bit_storage : require
+#extension GL_GOOGLE_include_directive : require
+
+#include "structures.layout"
 
 layout (constant_id = 0) const int CHANNELS = 3;
@@ -11,9 +14,9 @@ layout (push_constant) uniform Constants {
     uint y;
 } PC;
 
-layout (set = 0, binding = 0, rgba32f) uniform image2D dst;
-layout (set = 0, binding = 1) buffer input_array
+layout (set = 1, binding = 0, rgba32f) uniform image2D dst;
+layout (set = 1, binding = 1) buffer input_array
 {
     uint8_t src[];
 };

View File

@@ -1,7 +0,0 @@
#version 460
void main()
{
vec4 pos = vec4((float((gl_VertexIndex >> 1U) & 1U)) * 4.0 - 1.0, (float(gl_VertexIndex & 1U)) * 4.0 - 1.0, 0, 1.0);
gl_Position = pos;
}

View File

@@ -1,84 +0,0 @@
#version 460
#extension GL_GOOGLE_include_directive : require
#extension GL_EXT_nonuniform_qualifier : require
#define COMPOSITE_PASS
#include "structures.layout"
layout (location = 0) out vec4 FragColor;
void BubbleSort(inout uvec2 array[OIT_LAYERS], int n)
{
for(int i = (n - 2); i >= 0; --i)
{
for(int j = 0; j <= i; ++j)
{
if(uintBitsToFloat(array[j].g) >= uintBitsToFloat(array[j+1].g))
{
uvec2 temp = array[j+1];
array[j+1] = array[j];
array[j] = temp;
}
}
}
}
float UnPreMultSRGBToLinear(float col)
{
return (col < 0.04045f) ? (col / 12.92f) : pow((col + 0.055f) / 1.055f, 2.4f);
}
vec4 UnPreMultSRGBToLinear(vec4 col)
{
col.r = UnPreMultSRGBToLinear(col.r);
col.g = UnPreMultSRGBToLinear(col.g);
col.b = UnPreMultSRGBToLinear(col.b);
return col;
}
void DoBlend(inout vec4 color, vec4 base_color)
{
color.rgb += (1 - color.a) * base_color.rgb;
color.a += (1 - color.a) * base_color.a;
}
void DoBlendPacked(inout vec4 color, uint fragment)
{
vec4 unpacked_color = unpackUnorm4x8(fragment);
//unpacked_color = UnPreMultSRGBToLinear(unpacked_color);
unpacked_color.rgb *= unpacked_color.a;
DoBlend(color, unpacked_color);
}
void main()
{
ivec2 coord = ivec2(gl_FragCoord.xy);
int store_mask = 0;
int view_size = int(G.res.x) * int(G.res.y);
int sample_id = 0;
int list_pos = view_size * OIT_LAYERS * sample_id + coord.y * int(G.res.x) + coord.x;
uvec2 array[OIT_LAYERS];
vec4 color = vec4(0.0);
int fragments = int(imageLoad(ImageAux, coord).r);
fragments = min(OIT_LAYERS, fragments);
for (int i = 0; i < fragments; i++)
{
array[i] = imageLoad(ImageABuffer, list_pos + i * view_size).rg;
}
BubbleSort(array, fragments);
vec4 color_sum = vec4(0.0);
for(int i = 0; i < fragments; i++)
{
DoBlendPacked(color_sum, array[i].x);
}
FragColor = color_sum;
}

View File

@@ -1,79 +0,0 @@
#version 460
#extension GL_GOOGLE_include_directive : require
#extension GL_EXT_nonuniform_qualifier : require
#include "structures.layout"
layout (location = 0) in struct FragDataIn {
vec3 normal;
vec2 uv;
} FragData;
layout (location = 0, index = 0) out vec4 FragColor;
vec4 CalculateDirectionalLight(vec4 diff_col, vec4 diff_samp, vec4 light_col, vec3 dir, vec3 normal)
{
float diffuse_factor = max(dot(normal, -dir), 0.0);
vec4 ambient = vec4(vec3(G.ambient_color * diff_col), diff_samp.a);
vec4 diffuse = vec4(vec3(light_col * diffuse_factor), diff_samp.a);
diffuse *= diff_samp;
ambient *= diff_samp;
return (ambient + diffuse);
}
float UnPreMultLinearToSRGB(float col)
{
return col < 0.0031308f ? col * 12.92f : (pow(col, 1.0f / 2.4f) * 1.055f) - 0.055f;
}
vec4 UnPreMultLinearToSRGB(vec4 col)
{
col.r = UnPreMultLinearToSRGB(col.x);
col.g = UnPreMultLinearToSRGB(col.x);
col.b = UnPreMultLinearToSRGB(col.x);
return col;
}
void main()
{
ivec2 coord = ivec2(gl_FragCoord.xy);
int store_mask = 0;
int view_size = int(G.res.x) * int(G.res.y);
int sample_id = 0;
int list_pos = view_size * OIT_LAYERS * sample_id + coord.y * int(G.res.x) + coord.x;
vec4 col = Materials[nonuniformEXT(PC.mat_id)].diffuse;
vec4 tex_col = texture(sampler2D(Textures[nonuniformEXT(Materials[nonuniformEXT(PC.mat_id)].albedo_texture)], SamplerNearest), FragData.uv);
vec4 out_col = CalculateDirectionalLight(col, tex_col, G.light_color, G.light_direction, FragData.normal);
out_col.a = 1.0;// Materials[nonuniformEXT(PC.mat_id)].alpha;
if (Materials[nonuniformEXT(PC.mat_id)].alpha_has_texture)
{
vec4 alpha_col = texture(sampler2D(Textures[nonuniformEXT(Materials[nonuniformEXT(PC.mat_id)].alpha_texture)], SamplerNearest), FragData.uv);
out_col.a = alpha_col.a;
}
//FragColor = out_col;
//TODO: set up OIT again
vec4 srgb_col = out_col;// UnPreMultLinearToSRGB(out_col);
uvec4 store_value = uvec4(packUnorm4x8(srgb_col), floatBitsToUint(gl_FragCoord.z), store_mask, 0);
uint old_counter = imageAtomicAdd(ImageAux, coord, 1u);
if (old_counter < OIT_LAYERS)
{
imageStore(ImageABuffer, list_pos + int(old_counter) * view_size, store_value);
FragColor = vec4(0.0);
}
else
{
FragColor = vec4(0.0); //vec4(out_col.rgb * out_col.a, out_col.a); // Change to vec4(0.0) to disable tail blending
}
}

View File

@@ -1,57 +0,0 @@
#version 460
#extension GL_GOOGLE_include_directive : require
#extension GL_EXT_nonuniform_qualifier : require
#include "structures.layout"
layout (set = 1, binding = 0) uniform texture2D AlbedoTexture;
layout (set = 1, binding = 1) uniform texture2D AmbientTexture;
layout (set = 1, binding = 2) uniform texture2D SpecularTexture;
layout (set = 1, binding = 3) uniform texture2D AlphaTexture;
layout (set = 1, binding = 4) uniform Material {
vec4 ambient;
vec4 diffuse;
vec4 specular;
bool albedo_has_texture;
bool ambient_has_texture;
bool specular_has_texture;
bool alpha_has_texture;
float shininess;
float alpha;
} Materials[];
layout (push_constant) uniform Constants {
mat4 model_matrix;
uint mat_id;
} PC;
layout (location = 0) in vec4 in_col;
layout (location = 1) in vec4 in_tangent;
layout (location = 2) in vec3 in_pos;
layout (location = 3) in vec3 in_normal;
layout (location = 4) in vec2 in_uv;
layout (location = 0) out struct FragDataOut {
vec3 normal;
vec2 uv;
} FragData;
mat4 y_matrix = mat4(
1.0, 0.0, 0.0, 0.0,
0.0, -1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
);
void main()
{
gl_Position = G.projection_matrix * G.view_matrix * PC.model_matrix * vec4(in_pos, 1.0);
FragData.uv = in_uv;
FragData.normal = mat3(PC.model_matrix) * in_normal;
}

View File

@@ -1,15 +0,0 @@
#version 460
#extension GL_GOOGLE_include_directive : require
#extension GL_EXT_nonuniform_qualifier : require
#include "structures.layout"
layout (location = 0) in vec3 fragColor;
layout (location = 0) out vec4 outColor;
void main()
{
outColor = vec4(fragColor, 1.0);
}

View File

@@ -1,27 +0,0 @@
#version 460
#extension GL_GOOGLE_include_directive : require
#extension GL_EXT_nonuniform_qualifier : require
#include "structures.layout"
layout (location = 0) out vec3 fragColor;
vec2 positions[3] = vec2[](
vec2(0.0, -0.5),
vec2(0.5, 0.5),
vec2(-0.5, 0.5)
);
vec3 colors[3] = vec3[](
vec3(1.0, 0.0, 0.0),
vec3(0.0, 1.0, 0.0),
vec3(0.0, 0.0, 1.0)
);
void main()
{
gl_Position = vec4(positions[gl_VertexIndex], 0.0, 1.0);
fragColor = colors[gl_VertexIndex];
}