procedural-3d-engine/examples/deferred/deferred.cpp
/*
* Vulkan Example - Deferred shading with multiple render targets (aka G-Buffer) example
*
* This sample shows how to do deferred rendering. Unlike forward rendering, the different components such as
* albedo, normals and world positions are first rendered to offscreen images (the G-Buffer), which are then
* combined and lit in a composition pass.
* Use the dropdown in the UI to switch between the final composition and the separate G-Buffer components.
*
* Copyright (C) 2016-2023 by Sascha Willems - www.saschawillems.de
*
* This code is licensed under the MIT license (MIT) (http://opensource.org/licenses/MIT)
*/
#include "vulkanexamplebase.h"
#include "VulkanglTFModel.h"
class VulkanExample : public VulkanExampleBase
{
public:
int32_t debugDisplayTarget = 0;
struct {
struct {
vks::Texture2D colorMap;
vks::Texture2D normalMap;
} model;
struct {
vks::Texture2D colorMap;
vks::Texture2D normalMap;
} floor;
} textures;
struct {
vkglTF::Model model;
vkglTF::Model floor;
} models;
struct UniformDataOffscreen {
glm::mat4 projection;
glm::mat4 model;
glm::mat4 view;
glm::vec4 instancePos[3];
} uniformDataOffscreen;
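// Light definition shared with the composition fragment shader (deferred.frag); the vec3 color
// followed by the float radius packs into a single 16 byte slot under std140 layout rules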
struct Light {
glm::vec4 position;
glm::vec3 color;
float radius;
};
struct UniformDataComposition {
Light lights[6];
glm::vec4 viewPos;
// When set to a value > 0, the composition shader outputs the selected G-Buffer component instead of the lit result
int debugDisplayTarget = 0;
} uniformDataComposition;
struct {
vks::Buffer offscreen;
vks::Buffer composition;
} uniformBuffers;
struct {
VkPipeline offscreen{ VK_NULL_HANDLE };
VkPipeline composition{ VK_NULL_HANDLE };
} pipelines;
VkPipelineLayout pipelineLayout{ VK_NULL_HANDLE };
struct {
VkDescriptorSet model{ VK_NULL_HANDLE };
VkDescriptorSet floor{ VK_NULL_HANDLE };
VkDescriptorSet composition{ VK_NULL_HANDLE };
} descriptorSets;
VkDescriptorSetLayout descriptorSetLayout{ VK_NULL_HANDLE };
// Framebuffers holding the deferred attachments
struct FrameBufferAttachment {
VkImage image;
VkDeviceMemory mem;
VkImageView view;
VkFormat format;
};
struct FrameBuffer {
int32_t width, height;
VkFramebuffer frameBuffer;
// One attachment for every component required for a deferred rendering setup
FrameBufferAttachment position, normal, albedo;
FrameBufferAttachment depth;
VkRenderPass renderPass;
} offScreenFrameBuf{};
// One sampler for the frame buffer color attachments
VkSampler colorSampler{ VK_NULL_HANDLE };
VkCommandBuffer offScreenCmdBuffer{ VK_NULL_HANDLE };
// Semaphore used to synchronize between offscreen and final scene rendering
VkSemaphore offscreenSemaphore{ VK_NULL_HANDLE };
VulkanExample() : VulkanExampleBase()
{
title = "Deferred shading";
camera.type = Camera::CameraType::firstperson;
camera.movementSpeed = 5.0f;
#ifndef __ANDROID__
camera.rotationSpeed = 0.25f;
#endif
camera.position = { 2.15f, 0.3f, -8.75f };
camera.setRotation(glm::vec3(-0.75f, 12.5f, 0.0f));
camera.setPerspective(60.0f, (float)width / (float)height, 0.1f, 256.0f);
}
~VulkanExample()
{
if (device) {
vkDestroySampler(device, colorSampler, nullptr);
// Frame buffer
// Color attachments
vkDestroyImageView(device, offScreenFrameBuf.position.view, nullptr);
vkDestroyImage(device, offScreenFrameBuf.position.image, nullptr);
vkFreeMemory(device, offScreenFrameBuf.position.mem, nullptr);
vkDestroyImageView(device, offScreenFrameBuf.normal.view, nullptr);
vkDestroyImage(device, offScreenFrameBuf.normal.image, nullptr);
vkFreeMemory(device, offScreenFrameBuf.normal.mem, nullptr);
vkDestroyImageView(device, offScreenFrameBuf.albedo.view, nullptr);
vkDestroyImage(device, offScreenFrameBuf.albedo.image, nullptr);
vkFreeMemory(device, offScreenFrameBuf.albedo.mem, nullptr);
// Depth attachment
vkDestroyImageView(device, offScreenFrameBuf.depth.view, nullptr);
vkDestroyImage(device, offScreenFrameBuf.depth.image, nullptr);
vkFreeMemory(device, offScreenFrameBuf.depth.mem, nullptr);
vkDestroyFramebuffer(device, offScreenFrameBuf.frameBuffer, nullptr);
vkDestroyPipeline(device, pipelines.composition, nullptr);
vkDestroyPipeline(device, pipelines.offscreen, nullptr);
vkDestroyPipelineLayout(device, pipelineLayout, nullptr);
vkDestroyDescriptorSetLayout(device, descriptorSetLayout, nullptr);
// Uniform buffers
uniformBuffers.offscreen.destroy();
uniformBuffers.composition.destroy();
vkDestroyRenderPass(device, offScreenFrameBuf.renderPass, nullptr);
textures.model.colorMap.destroy();
textures.model.normalMap.destroy();
textures.floor.colorMap.destroy();
textures.floor.normalMap.destroy();
vkDestroySemaphore(device, offscreenSemaphore, nullptr);
}
}
// Enable physical device features required for this example
virtual void getEnabledFeatures()
{
// Enable anisotropic filtering if supported
if (deviceFeatures.samplerAnisotropy) {
enabledFeatures.samplerAnisotropy = VK_TRUE;
}
};
// Create a frame buffer attachment
void createAttachment(
VkFormat format,
VkImageUsageFlagBits usage,
FrameBufferAttachment *attachment)
{
VkImageAspectFlags aspectMask = 0;
VkImageLayout imageLayout;
attachment->format = format;
if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
{
aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
}
if (usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
{
aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
// Depth formats at or above VK_FORMAT_D16_UNORM_S8_UINT in the format enum also contain a stencil aspect
if (format >= VK_FORMAT_D16_UNORM_S8_UINT)
aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
}
assert(aspectMask > 0);
VkImageCreateInfo image = vks::initializers::imageCreateInfo();
image.imageType = VK_IMAGE_TYPE_2D;
image.format = format;
image.extent.width = offScreenFrameBuf.width;
image.extent.height = offScreenFrameBuf.height;
image.extent.depth = 1;
image.mipLevels = 1;
image.arrayLayers = 1;
image.samples = VK_SAMPLE_COUNT_1_BIT;
image.tiling = VK_IMAGE_TILING_OPTIMAL;
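// In addition to the requested usage, the attachments also need to be sampled in the composition pass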
image.usage = usage | VK_IMAGE_USAGE_SAMPLED_BIT;
VkMemoryAllocateInfo memAlloc = vks::initializers::memoryAllocateInfo();
VkMemoryRequirements memReqs;
VK_CHECK_RESULT(vkCreateImage(device, &image, nullptr, &attachment->image));
vkGetImageMemoryRequirements(device, attachment->image, &memReqs);
memAlloc.allocationSize = memReqs.size;
memAlloc.memoryTypeIndex = vulkanDevice->getMemoryType(memReqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
VK_CHECK_RESULT(vkAllocateMemory(device, &memAlloc, nullptr, &attachment->mem));
VK_CHECK_RESULT(vkBindImageMemory(device, attachment->image, attachment->mem, 0));
VkImageViewCreateInfo imageView = vks::initializers::imageViewCreateInfo();
imageView.viewType = VK_IMAGE_VIEW_TYPE_2D;
imageView.format = format;
imageView.subresourceRange = {};
imageView.subresourceRange.aspectMask = aspectMask;
imageView.subresourceRange.baseMipLevel = 0;
imageView.subresourceRange.levelCount = 1;
imageView.subresourceRange.baseArrayLayer = 0;
imageView.subresourceRange.layerCount = 1;
imageView.image = attachment->image;
VK_CHECK_RESULT(vkCreateImageView(device, &imageView, nullptr, &attachment->view));
}
// Prepare a new framebuffer and attachments for offscreen rendering (G-Buffer)
void prepareOffscreenFramebuffer()
{
// Note: Instead of using fixed sizes, one could also match the window size and recreate the attachments on resize
offScreenFrameBuf.width = 2048;
offScreenFrameBuf.height = 2048;
// Color attachments
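// Position and normal data is stored in higher precision 16 bit float targets, albedo only requires 8 bits per channel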
// (World space) Positions
createAttachment(
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
&offScreenFrameBuf.position);
// (World space) Normals
createAttachment(
VK_FORMAT_R16G16B16A16_SFLOAT,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
&offScreenFrameBuf.normal);
// Albedo (color)
createAttachment(
VK_FORMAT_R8G8B8A8_UNORM,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
&offScreenFrameBuf.albedo);
// Depth attachment
// Find a suitable depth format
VkFormat attDepthFormat;
VkBool32 validDepthFormat = vks::tools::getSupportedDepthFormat(physicalDevice, &attDepthFormat);
assert(validDepthFormat);
createAttachment(
attDepthFormat,
VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
&offScreenFrameBuf.depth);
// Set up separate renderpass with references to the color and depth attachments
std::array<VkAttachmentDescription, 4> attachmentDescs = {};
// Init attachment properties
for (uint32_t i = 0; i < 4; ++i)
{
attachmentDescs[i].samples = VK_SAMPLE_COUNT_1_BIT;
attachmentDescs[i].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
attachmentDescs[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
attachmentDescs[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescs[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
if (i == 3)
{
attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
}
else
{
attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
}
// Formats
attachmentDescs[0].format = offScreenFrameBuf.position.format;
attachmentDescs[1].format = offScreenFrameBuf.normal.format;
attachmentDescs[2].format = offScreenFrameBuf.albedo.format;
attachmentDescs[3].format = offScreenFrameBuf.depth.format;
std::vector<VkAttachmentReference> colorReferences;
colorReferences.push_back({ 0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL });
colorReferences.push_back({ 1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL });
colorReferences.push_back({ 2, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL });
VkAttachmentReference depthReference = {};
depthReference.attachment = 3;
depthReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
VkSubpassDescription subpass = {};
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.pColorAttachments = colorReferences.data();
subpass.colorAttachmentCount = static_cast<uint32_t>(colorReferences.size());
subpass.pDepthStencilAttachment = &depthReference;
// Use subpass dependencies for attachment layout transitions
std::array<VkSubpassDependency, 2> dependencies;
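// First dependency: wait for previous (external) usage of the attachments before they are written as color attachments in this subpass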
dependencies[0].srcSubpass = VK_SUBPASS_EXTERNAL;
dependencies[0].dstSubpass = 0;
dependencies[0].srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
dependencies[0].dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
dependencies[0].srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
dependencies[0].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
dependencies[0].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
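// Second dependency: make the color attachment writes visible to subsequent (external) reads, so the composition pass can sample the attachments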
dependencies[1].srcSubpass = 0;
dependencies[1].dstSubpass = VK_SUBPASS_EXTERNAL;
dependencies[1].srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
dependencies[1].dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
dependencies[1].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
dependencies[1].dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
dependencies[1].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
VkRenderPassCreateInfo renderPassInfo = {};
renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPassInfo.pAttachments = attachmentDescs.data();
renderPassInfo.attachmentCount = static_cast<uint32_t>(attachmentDescs.size());
renderPassInfo.subpassCount = 1;
renderPassInfo.pSubpasses = &subpass;
renderPassInfo.dependencyCount = 2;
renderPassInfo.pDependencies = dependencies.data();
VK_CHECK_RESULT(vkCreateRenderPass(device, &renderPassInfo, nullptr, &offScreenFrameBuf.renderPass));
std::array<VkImageView,4> attachments;
attachments[0] = offScreenFrameBuf.position.view;
attachments[1] = offScreenFrameBuf.normal.view;
attachments[2] = offScreenFrameBuf.albedo.view;
attachments[3] = offScreenFrameBuf.depth.view;
VkFramebufferCreateInfo fbufCreateInfo = {};
fbufCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
fbufCreateInfo.pNext = NULL;
fbufCreateInfo.renderPass = offScreenFrameBuf.renderPass;
fbufCreateInfo.pAttachments = attachments.data();
fbufCreateInfo.attachmentCount = static_cast<uint32_t>(attachments.size());
fbufCreateInfo.width = offScreenFrameBuf.width;
fbufCreateInfo.height = offScreenFrameBuf.height;
fbufCreateInfo.layers = 1;
VK_CHECK_RESULT(vkCreateFramebuffer(device, &fbufCreateInfo, nullptr, &offScreenFrameBuf.frameBuffer));
// Create sampler to sample from the color attachments
VkSamplerCreateInfo sampler = vks::initializers::samplerCreateInfo();
sampler.magFilter = VK_FILTER_NEAREST;
sampler.minFilter = VK_FILTER_NEAREST;
sampler.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
sampler.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
sampler.addressModeV = sampler.addressModeU;
sampler.addressModeW = sampler.addressModeU;
sampler.mipLodBias = 0.0f;
sampler.maxAnisotropy = 1.0f;
sampler.minLod = 0.0f;
sampler.maxLod = 1.0f;
sampler.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
VK_CHECK_RESULT(vkCreateSampler(device, &sampler, nullptr, &colorSampler));
}
// Build command buffer for rendering the scene to the offscreen frame buffer attachments
void buildDeferredCommandBuffer()
{
if (offScreenCmdBuffer == VK_NULL_HANDLE) {
offScreenCmdBuffer = vulkanDevice->createCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY, false);
}
// Create a semaphore used to synchronize offscreen rendering and usage
VkSemaphoreCreateInfo semaphoreCreateInfo = vks::initializers::semaphoreCreateInfo();
VK_CHECK_RESULT(vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &offscreenSemaphore));
VkCommandBufferBeginInfo cmdBufInfo = vks::initializers::commandBufferBeginInfo();
// Clear values for all attachments written in the fragment shader
std::array<VkClearValue,4> clearValues;
clearValues[0].color = { { 0.0f, 0.0f, 0.0f, 0.0f } };
clearValues[1].color = { { 0.0f, 0.0f, 0.0f, 0.0f } };
clearValues[2].color = { { 0.0f, 0.0f, 0.0f, 0.0f } };
clearValues[3].depthStencil = { 1.0f, 0 };
VkRenderPassBeginInfo renderPassBeginInfo = vks::initializers::renderPassBeginInfo();
renderPassBeginInfo.renderPass = offScreenFrameBuf.renderPass;
renderPassBeginInfo.framebuffer = offScreenFrameBuf.frameBuffer;
renderPassBeginInfo.renderArea.extent.width = offScreenFrameBuf.width;
renderPassBeginInfo.renderArea.extent.height = offScreenFrameBuf.height;
renderPassBeginInfo.clearValueCount = static_cast<uint32_t>(clearValues.size());
renderPassBeginInfo.pClearValues = clearValues.data();
VK_CHECK_RESULT(vkBeginCommandBuffer(offScreenCmdBuffer, &cmdBufInfo));
vkCmdBeginRenderPass(offScreenCmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
VkViewport viewport = vks::initializers::viewport((float)offScreenFrameBuf.width, (float)offScreenFrameBuf.height, 0.0f, 1.0f);
vkCmdSetViewport(offScreenCmdBuffer, 0, 1, &viewport);
VkRect2D scissor = vks::initializers::rect2D(offScreenFrameBuf.width, offScreenFrameBuf.height, 0, 0);
vkCmdSetScissor(offScreenCmdBuffer, 0, 1, &scissor);
vkCmdBindPipeline(offScreenCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.offscreen);
// Floor
vkCmdBindDescriptorSets(offScreenCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.floor, 0, nullptr);
models.floor.draw(offScreenCmdBuffer);
// We render multiple instances of a model
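// Three instances are drawn, offset in the vertex shader using the instancePos array from the offscreen uniform buffer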
vkCmdBindDescriptorSets(offScreenCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.model, 0, nullptr);
models.model.bindBuffers(offScreenCmdBuffer);
vkCmdDrawIndexed(offScreenCmdBuffer, models.model.indices.count, 3, 0, 0, 0);
vkCmdEndRenderPass(offScreenCmdBuffer);
VK_CHECK_RESULT(vkEndCommandBuffer(offScreenCmdBuffer));
}
void loadAssets()
{
const uint32_t glTFLoadingFlags = vkglTF::FileLoadingFlags::PreTransformVertices | vkglTF::FileLoadingFlags::PreMultiplyVertexColors | vkglTF::FileLoadingFlags::FlipY;
models.model.loadFromFile(getAssetPath() + "models/armor/armor.gltf", vulkanDevice, queue, glTFLoadingFlags);
models.floor.loadFromFile(getAssetPath() + "models/deferred_floor.gltf", vulkanDevice, queue, glTFLoadingFlags);
textures.model.colorMap.loadFromFile(getAssetPath() + "models/armor/colormap_rgba.ktx", VK_FORMAT_R8G8B8A8_UNORM, vulkanDevice, queue);
textures.model.normalMap.loadFromFile(getAssetPath() + "models/armor/normalmap_rgba.ktx", VK_FORMAT_R8G8B8A8_UNORM, vulkanDevice, queue);
textures.floor.colorMap.loadFromFile(getAssetPath() + "textures/stonefloor01_color_rgba.ktx", VK_FORMAT_R8G8B8A8_UNORM, vulkanDevice, queue);
textures.floor.normalMap.loadFromFile(getAssetPath() + "textures/stonefloor01_normal_rgba.ktx", VK_FORMAT_R8G8B8A8_UNORM, vulkanDevice, queue);
}
void buildCommandBuffers()
{
VkCommandBufferBeginInfo cmdBufInfo = vks::initializers::commandBufferBeginInfo();
VkClearValue clearValues[2];
clearValues[0].color = { { 0.0f, 0.0f, 0.2f, 0.0f } };
clearValues[1].depthStencil = { 1.0f, 0 };
VkRenderPassBeginInfo renderPassBeginInfo = vks::initializers::renderPassBeginInfo();
renderPassBeginInfo.renderPass = renderPass;
renderPassBeginInfo.renderArea.offset.x = 0;
renderPassBeginInfo.renderArea.offset.y = 0;
renderPassBeginInfo.renderArea.extent.width = width;
renderPassBeginInfo.renderArea.extent.height = height;
renderPassBeginInfo.clearValueCount = 2;
renderPassBeginInfo.pClearValues = clearValues;
for (size_t i = 0; i < drawCmdBuffers.size(); ++i)
{
renderPassBeginInfo.framebuffer = frameBuffers[i];
VK_CHECK_RESULT(vkBeginCommandBuffer(drawCmdBuffers[i], &cmdBufInfo));
vkCmdBeginRenderPass(drawCmdBuffers[i], &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
VkViewport viewport = vks::initializers::viewport((float)width, (float)height, 0.0f, 1.0f);
vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);
VkRect2D scissor = vks::initializers::rect2D(width, height, 0, 0);
vkCmdSetScissor(drawCmdBuffers[i], 0, 1, &scissor);
vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSets.composition, 0, nullptr);
vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.composition);
// Final composition
// This is done by drawing a single full screen triangle generated in the vertex shader
// The fragment shader then combines the deferred attachments into the final image
// Note: Also used for debug display if debugDisplayTarget > 0
vkCmdDraw(drawCmdBuffers[i], 3, 1, 0, 0);
drawUI(drawCmdBuffers[i]);
vkCmdEndRenderPass(drawCmdBuffers[i]);
VK_CHECK_RESULT(vkEndCommandBuffer(drawCmdBuffers[i]));
}
}
void setupDescriptors()
{
// Pool
std::vector<VkDescriptorPoolSize> poolSizes = {
vks::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 8),
vks::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 9)
};
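// The example uses three descriptor sets: one for the composition pass and one per rendered object (model and floor)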
VkDescriptorPoolCreateInfo descriptorPoolInfo = vks::initializers::descriptorPoolCreateInfo(poolSizes, 3);
VK_CHECK_RESULT(vkCreateDescriptorPool(device, &descriptorPoolInfo, nullptr, &descriptorPool));
// Layouts
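// A single set layout (and pipeline layout) is shared by the offscreen and composition pipelines; the bindings are simply interpreted differently by the respective shaders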
std::vector<VkDescriptorSetLayoutBinding> setLayoutBindings = {
// Binding 0 : Vertex shader uniform buffer
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_VERTEX_BIT, 0),
// Binding 1 : Position texture target / Scene colormap
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT, 1),
// Binding 2 : Normals texture target
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT, 2),
// Binding 3 : Albedo texture target
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT, 3),
// Binding 4 : Fragment shader uniform buffer
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_FRAGMENT_BIT, 4),
};
VkDescriptorSetLayoutCreateInfo descriptorLayout = vks::initializers::descriptorSetLayoutCreateInfo(setLayoutBindings);
VK_CHECK_RESULT(vkCreateDescriptorSetLayout(device, &descriptorLayout, nullptr, &descriptorSetLayout));
// Sets
std::vector<VkWriteDescriptorSet> writeDescriptorSets;
VkDescriptorSetAllocateInfo allocInfo = vks::initializers::descriptorSetAllocateInfo(descriptorPool, &descriptorSetLayout, 1);
// Image descriptors for the offscreen color attachments
VkDescriptorImageInfo texDescriptorPosition =
vks::initializers::descriptorImageInfo(
colorSampler,
offScreenFrameBuf.position.view,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
VkDescriptorImageInfo texDescriptorNormal =
vks::initializers::descriptorImageInfo(
colorSampler,
offScreenFrameBuf.normal.view,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
VkDescriptorImageInfo texDescriptorAlbedo =
vks::initializers::descriptorImageInfo(
colorSampler,
offScreenFrameBuf.albedo.view,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
// Deferred composition
VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.composition));
writeDescriptorSets = {
// Binding 1 : Position texture target
vks::initializers::writeDescriptorSet(descriptorSets.composition, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &texDescriptorPosition),
// Binding 2 : Normals texture target
vks::initializers::writeDescriptorSet(descriptorSets.composition, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2, &texDescriptorNormal),
// Binding 3 : Albedo texture target
vks::initializers::writeDescriptorSet(descriptorSets.composition, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 3, &texDescriptorAlbedo),
// Binding 4 : Fragment shader uniform buffer
vks::initializers::writeDescriptorSet(descriptorSets.composition, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 4, &uniformBuffers.composition.descriptor),
};
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, nullptr);
// Offscreen (scene)
// Model
VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.model));
writeDescriptorSets = {
// Binding 0: Vertex shader uniform buffer
vks::initializers::writeDescriptorSet(descriptorSets.model, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformBuffers.offscreen.descriptor),
// Binding 1: Color map
vks::initializers::writeDescriptorSet(descriptorSets.model, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &textures.model.colorMap.descriptor),
// Binding 2: Normal map
vks::initializers::writeDescriptorSet(descriptorSets.model, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2, &textures.model.normalMap.descriptor)
};
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, nullptr);
// Background
VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.floor));
writeDescriptorSets = {
// Binding 0: Vertex shader uniform buffer
vks::initializers::writeDescriptorSet(descriptorSets.floor, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformBuffers.offscreen.descriptor),
// Binding 1: Color map
vks::initializers::writeDescriptorSet(descriptorSets.floor, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &textures.floor.colorMap.descriptor),
// Binding 2: Normal map
vks::initializers::writeDescriptorSet(descriptorSets.floor, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2, &textures.floor.normalMap.descriptor)
};
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, nullptr);
}
void preparePipelines()
{
// Pipeline layout
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo = vks::initializers::pipelineLayoutCreateInfo(&descriptorSetLayout, 1);
VK_CHECK_RESULT(vkCreatePipelineLayout(device, &pipelineLayoutCreateInfo, nullptr, &pipelineLayout));
// Pipelines
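// Both pipelines share most of their state; they only differ in shader stages, vertex input, cull mode, render pass and color blend attachment count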
VkPipelineInputAssemblyStateCreateInfo inputAssemblyState = vks::initializers::pipelineInputAssemblyStateCreateInfo(VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 0, VK_FALSE);
VkPipelineRasterizationStateCreateInfo rasterizationState = vks::initializers::pipelineRasterizationStateCreateInfo(VK_POLYGON_MODE_FILL, VK_CULL_MODE_BACK_BIT, VK_FRONT_FACE_COUNTER_CLOCKWISE, 0);
VkPipelineColorBlendAttachmentState blendAttachmentState = vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE);
VkPipelineColorBlendStateCreateInfo colorBlendState = vks::initializers::pipelineColorBlendStateCreateInfo(1, &blendAttachmentState);
VkPipelineDepthStencilStateCreateInfo depthStencilState = vks::initializers::pipelineDepthStencilStateCreateInfo(VK_TRUE, VK_TRUE, VK_COMPARE_OP_LESS_OR_EQUAL);
VkPipelineViewportStateCreateInfo viewportState = vks::initializers::pipelineViewportStateCreateInfo(1, 1, 0);
VkPipelineMultisampleStateCreateInfo multisampleState = vks::initializers::pipelineMultisampleStateCreateInfo(VK_SAMPLE_COUNT_1_BIT, 0);
std::vector<VkDynamicState> dynamicStateEnables = {VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR};
VkPipelineDynamicStateCreateInfo dynamicState = vks::initializers::pipelineDynamicStateCreateInfo(dynamicStateEnables);
std::array<VkPipelineShaderStageCreateInfo, 2> shaderStages;
VkGraphicsPipelineCreateInfo pipelineCI = vks::initializers::pipelineCreateInfo(pipelineLayout, renderPass);
pipelineCI.pInputAssemblyState = &inputAssemblyState;
pipelineCI.pRasterizationState = &rasterizationState;
pipelineCI.pColorBlendState = &colorBlendState;
pipelineCI.pMultisampleState = &multisampleState;
pipelineCI.pViewportState = &viewportState;
pipelineCI.pDepthStencilState = &depthStencilState;
pipelineCI.pDynamicState = &dynamicState;
pipelineCI.stageCount = static_cast<uint32_t>(shaderStages.size());
pipelineCI.pStages = shaderStages.data();
// Final fullscreen composition pass pipeline
rasterizationState.cullMode = VK_CULL_MODE_FRONT_BIT;
shaderStages[0] = loadShader(getShadersPath() + "deferred/deferred.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
shaderStages[1] = loadShader(getShadersPath() + "deferred/deferred.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
// Empty vertex input state, vertices are generated by the vertex shader
VkPipelineVertexInputStateCreateInfo emptyInputState = vks::initializers::pipelineVertexInputStateCreateInfo();
pipelineCI.pVertexInputState = &emptyInputState;
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCI, nullptr, &pipelines.composition));
// Vertex input state from glTF model for pipeline rendering models
pipelineCI.pVertexInputState = vkglTF::Vertex::getPipelineVertexInputState({vkglTF::VertexComponent::Position, vkglTF::VertexComponent::UV, vkglTF::VertexComponent::Color, vkglTF::VertexComponent::Normal, vkglTF::VertexComponent::Tangent});
rasterizationState.cullMode = VK_CULL_MODE_BACK_BIT;
// Offscreen pipeline
shaderStages[0] = loadShader(getShadersPath() + "deferred/mrt.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
shaderStages[1] = loadShader(getShadersPath() + "deferred/mrt.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
// Separate render pass
pipelineCI.renderPass = offScreenFrameBuf.renderPass;
// Blend attachment states required for all color attachments
// This is important, as color write mask will otherwise be 0x0 and you
// won't see anything rendered to the attachment
std::array<VkPipelineColorBlendAttachmentState, 3> blendAttachmentStates = {
vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE),
vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE),
vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE)
};
colorBlendState.attachmentCount = static_cast<uint32_t>(blendAttachmentStates.size());
colorBlendState.pAttachments = blendAttachmentStates.data();
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCI, nullptr, &pipelines.offscreen));
}
// Prepare and initialize uniform buffer containing shader uniforms
void prepareUniformBuffers()
{
// Offscreen vertex shader
VK_CHECK_RESULT(vulkanDevice->createBuffer(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, &uniformBuffers.offscreen, sizeof(UniformDataOffscreen)));
// Deferred fragment shader
VK_CHECK_RESULT(vulkanDevice->createBuffer(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, &uniformBuffers.composition, sizeof(UniformDataComposition)));
// Map the uniform buffers persistently so per-frame updates only require a memcpy
VK_CHECK_RESULT(uniformBuffers.offscreen.map());
VK_CHECK_RESULT(uniformBuffers.composition.map());
// Setup instanced model positions
uniformDataOffscreen.instancePos[0] = glm::vec4(0.0f);
uniformDataOffscreen.instancePos[1] = glm::vec4(-4.0f, 0.0, -4.0f, 0.0f);
uniformDataOffscreen.instancePos[2] = glm::vec4(4.0f, 0.0, -4.0f, 0.0f);
// Update
updateUniformBufferOffscreen();
updateUniformBufferComposition();
}
// Update matrices used for the offscreen rendering of the scene
void updateUniformBufferOffscreen()
{
uniformDataOffscreen.projection = camera.matrices.perspective;
uniformDataOffscreen.view = camera.matrices.view;
uniformDataOffscreen.model = glm::mat4(1.0f);
memcpy(uniformBuffers.offscreen.mapped, &uniformDataOffscreen, sizeof(UniformDataOffscreen));
}
// Update lights and parameters passed to the composition shaders
void updateUniformBufferComposition()
{
// White
uniformDataComposition.lights[0].position = glm::vec4(0.0f, 0.0f, 1.0f, 0.0f);
uniformDataComposition.lights[0].color = glm::vec3(1.5f);
uniformDataComposition.lights[0].radius = 15.0f * 0.25f;
// Red
uniformDataComposition.lights[1].position = glm::vec4(-2.0f, 0.0f, 0.0f, 0.0f);
uniformDataComposition.lights[1].color = glm::vec3(1.0f, 0.0f, 0.0f);
uniformDataComposition.lights[1].radius = 15.0f;
// Blue
uniformDataComposition.lights[2].position = glm::vec4(2.0f, -1.0f, 0.0f, 0.0f);
uniformDataComposition.lights[2].color = glm::vec3(0.0f, 0.0f, 2.5f);
uniformDataComposition.lights[2].radius = 5.0f;
// Yellow
uniformDataComposition.lights[3].position = glm::vec4(0.0f, -0.9f, 0.5f, 0.0f);
uniformDataComposition.lights[3].color = glm::vec3(1.0f, 1.0f, 0.0f);
uniformDataComposition.lights[3].radius = 2.0f;
// Green
uniformDataComposition.lights[4].position = glm::vec4(0.0f, -0.5f, 0.0f, 0.0f);
uniformDataComposition.lights[4].color = glm::vec3(0.0f, 1.0f, 0.2f);
uniformDataComposition.lights[4].radius = 5.0f;
// Yellow
uniformDataComposition.lights[5].position = glm::vec4(0.0f, -1.0f, 0.0f, 0.0f);
uniformDataComposition.lights[5].color = glm::vec3(1.0f, 0.7f, 0.3f);
uniformDataComposition.lights[5].radius = 25.0f;
// Animate the lights
if (!paused) {
uniformDataComposition.lights[0].position.x = sin(glm::radians(360.0f * timer)) * 5.0f;
uniformDataComposition.lights[0].position.z = cos(glm::radians(360.0f * timer)) * 5.0f;
uniformDataComposition.lights[1].position.x = -4.0f + sin(glm::radians(360.0f * timer) + 45.0f) * 2.0f;
uniformDataComposition.lights[1].position.z = 0.0f + cos(glm::radians(360.0f * timer) + 45.0f) * 2.0f;
uniformDataComposition.lights[2].position.x = 4.0f + sin(glm::radians(360.0f * timer)) * 2.0f;
uniformDataComposition.lights[2].position.z = 0.0f + cos(glm::radians(360.0f * timer)) * 2.0f;
uniformDataComposition.lights[4].position.x = 0.0f + sin(glm::radians(360.0f * timer + 90.0f)) * 5.0f;
uniformDataComposition.lights[4].position.z = 0.0f - cos(glm::radians(360.0f * timer + 45.0f)) * 5.0f;
uniformDataComposition.lights[5].position.x = 0.0f + sin(glm::radians(-360.0f * timer + 135.0f)) * 10.0f;
uniformDataComposition.lights[5].position.z = 0.0f - cos(glm::radians(-360.0f * timer - 45.0f)) * 10.0f;
}
// Current view position
uniformDataComposition.viewPos = glm::vec4(camera.position, 0.0f) * glm::vec4(-1.0f, 1.0f, -1.0f, 1.0f);
uniformDataComposition.debugDisplayTarget = debugDisplayTarget;
memcpy(uniformBuffers.composition.mapped, &uniformDataComposition, sizeof(UniformDataComposition));
}
void prepare()
{
VulkanExampleBase::prepare();
loadAssets();
prepareOffscreenFramebuffer();
prepareUniformBuffers();
setupDescriptors();
preparePipelines();
buildCommandBuffers();
buildDeferredCommandBuffer();
prepared = true;
}
void draw()
{
VulkanExampleBase::prepareFrame();
// The scene render command buffer has to wait for the offscreen
// rendering to be finished before the G-Buffer attachments can be
// sampled during the final composition pass.
// To ensure this we use a dedicated offscreen synchronization
// semaphore that is signaled when offscreen rendering has finished.
// This is necessary as an implementation may start both command
// buffers at the same time; there is no guarantee that command
// buffers are executed in the order they were submitted by the application.
// Offscreen rendering
// Wait for swap chain presentation to finish
submitInfo.pWaitSemaphores = &semaphores.presentComplete;
// Signal ready with offscreen semaphore
submitInfo.pSignalSemaphores = &offscreenSemaphore;
// Submit work
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &offScreenCmdBuffer;
VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
// Scene rendering
// Wait for offscreen semaphore
submitInfo.pWaitSemaphores = &offscreenSemaphore;
// Signal ready with render complete semaphore
submitInfo.pSignalSemaphores = &semaphores.renderComplete;
// Submit work
submitInfo.pCommandBuffers = &drawCmdBuffers[currentBuffer];
VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
VulkanExampleBase::submitFrame();
}
virtual void render()
{
if (!prepared)
return;
updateUniformBufferComposition();
updateUniformBufferOffscreen();
draw();
}
virtual void OnUpdateUIOverlay(vks::UIOverlay *overlay)
{
if (overlay->header("Settings")) {
overlay->comboBox("Display", &debugDisplayTarget, { "Final composition", "Position", "Normals", "Albedo", "Specular" });
}
}
};
VULKAN_EXAMPLE_MAIN()