The sample count was previously fixed at 8, which is not supported on some platforms. The code now checks for the maximum supported sample count and uses that instead. If you would prefer the upper limit to be fixed at 8, that is obviously trivial to change.
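For reference, if a fixed upper limit of 8 samples is still preferred, clamping the detected value is all that is needed. A minimal sketch (hypothetical, not part of this change), applied where prepare() queries the device below:

    sampleCount = getMaxUsableSampleCount();
    // Optional cap: never exceed 8 samples even if the device supports more
    if (sampleCount > VK_SAMPLE_COUNT_8_BIT) {
        sampleCount = VK_SAMPLE_COUNT_8_BIT;
    }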
/*
 * Vulkan Example - Multi sampling with explicit resolve for deferred shading example
 *
 * Copyright (C) 2016 by Sascha Willems - www.saschawillems.de
 *
 * This code is licensed under the MIT license (MIT) (http://opensource.org/licenses/MIT)
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <vector>

#define GLM_FORCE_RADIANS
#define GLM_FORCE_DEPTH_ZERO_TO_ONE
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

#include <vulkan/vulkan.h>
#include "vulkanexamplebase.h"
#include "VulkanBuffer.hpp"
#include "VulkanTexture.hpp"
#include "VulkanModel.hpp"

#define VERTEX_BUFFER_BIND_ID 0
#define ENABLE_VALIDATION false

class VulkanExample : public VulkanExampleBase
{
public:
    bool debugDisplay = false;
    bool useMSAA = true;
    bool useSampleShading = true;
    VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_1_BIT;

    struct {
        struct {
            vks::Texture2D colorMap;
            vks::Texture2D normalMap;
        } model;
        struct {
            vks::Texture2D colorMap;
            vks::Texture2D normalMap;
        } floor;
    } textures;

    // Vertex layout for the models
    vks::VertexLayout vertexLayout = vks::VertexLayout({
        vks::VERTEX_COMPONENT_POSITION,
        vks::VERTEX_COMPONENT_UV,
        vks::VERTEX_COMPONENT_COLOR,
        vks::VERTEX_COMPONENT_NORMAL,
        vks::VERTEX_COMPONENT_TANGENT,
    });

    struct {
        vks::Model model;
        vks::Model floor;
        vks::Model quad;
    } models;

    struct {
        VkPipelineVertexInputStateCreateInfo inputState;
        std::vector<VkVertexInputBindingDescription> bindingDescriptions;
        std::vector<VkVertexInputAttributeDescription> attributeDescriptions;
    } vertices;

    struct {
        glm::mat4 projection;
        glm::mat4 model;
        glm::mat4 view;
        glm::vec4 instancePos[3];
    } uboVS, uboOffscreenVS;

    struct Light {
        glm::vec4 position;
        glm::vec3 color;
        float radius;
    };

    struct {
        Light lights[6];
        glm::vec4 viewPos;
        glm::ivec2 windowSize;
    } uboFragmentLights;

    struct {
        vks::Buffer vsFullScreen;
        vks::Buffer vsOffscreen;
        vks::Buffer fsLights;
    } uniformBuffers;

    struct {
        VkPipeline deferred;               // Deferred lighting calculation with explicit MSAA resolve in the fragment shader
        VkPipeline deferredNoMSAA;         // Deferred lighting calculation reading a single sample (no MSAA resolve)
        VkPipeline offscreen;              // (Offscreen) scene rendering (fill G-Buffers)
        VkPipeline offscreenSampleShading; // (Offscreen) scene rendering (fill G-Buffers) with sample shading rate enabled
        VkPipeline debug;                  // G-Buffers debug display
    } pipelines;

    struct {
        VkPipelineLayout deferred;
        VkPipelineLayout offscreen;
    } pipelineLayouts;

    struct {
        VkDescriptorSet model;
        VkDescriptorSet floor;
    } descriptorSets;

    VkDescriptorSet descriptorSet;
    VkDescriptorSetLayout descriptorSetLayout;

    // Framebuffer for offscreen rendering
    struct FrameBufferAttachment {
        VkImage image;
        VkDeviceMemory mem;
        VkImageView view;
        VkFormat format;
    };
    struct FrameBuffer {
        int32_t width, height;
        VkFramebuffer frameBuffer;
        FrameBufferAttachment position, normal, albedo;
        FrameBufferAttachment depth;
        VkRenderPass renderPass;
    } offScreenFrameBuf;

    // One sampler for the frame buffer color attachments
    VkSampler colorSampler;

    VkCommandBuffer offScreenCmdBuffer = VK_NULL_HANDLE;

    // Semaphore used to synchronize between offscreen and final scene rendering
    VkSemaphore offscreenSemaphore = VK_NULL_HANDLE;

    VulkanExample() : VulkanExampleBase(ENABLE_VALIDATION)
    {
        title = "Multi sampled deferred shading";
        camera.type = Camera::CameraType::firstperson;
        camera.movementSpeed = 5.0f;
#ifndef __ANDROID__
        camera.rotationSpeed = 0.25f;
#endif
        camera.position = { 2.15f, 0.3f, -8.75f };
        camera.setRotation(glm::vec3(-0.75f, 12.5f, 0.0f));
        camera.setPerspective(60.0f, (float)width / (float)height, 0.1f, 256.0f);
        paused = true;
        settings.overlay = true;
    }

    ~VulkanExample()
    {
        // Clean up used Vulkan resources
        // Note : Inherited destructor cleans up resources stored in base class

        vkDestroySampler(device, colorSampler, nullptr);

        // Frame buffer

        // Color attachments
        vkDestroyImageView(device, offScreenFrameBuf.position.view, nullptr);
        vkDestroyImage(device, offScreenFrameBuf.position.image, nullptr);
        vkFreeMemory(device, offScreenFrameBuf.position.mem, nullptr);

        vkDestroyImageView(device, offScreenFrameBuf.normal.view, nullptr);
        vkDestroyImage(device, offScreenFrameBuf.normal.image, nullptr);
        vkFreeMemory(device, offScreenFrameBuf.normal.mem, nullptr);

        vkDestroyImageView(device, offScreenFrameBuf.albedo.view, nullptr);
        vkDestroyImage(device, offScreenFrameBuf.albedo.image, nullptr);
        vkFreeMemory(device, offScreenFrameBuf.albedo.mem, nullptr);

        // Depth attachment
        vkDestroyImageView(device, offScreenFrameBuf.depth.view, nullptr);
        vkDestroyImage(device, offScreenFrameBuf.depth.image, nullptr);
        vkFreeMemory(device, offScreenFrameBuf.depth.mem, nullptr);

        vkDestroyFramebuffer(device, offScreenFrameBuf.frameBuffer, nullptr);

        vkDestroyPipeline(device, pipelines.deferred, nullptr);
        vkDestroyPipeline(device, pipelines.deferredNoMSAA, nullptr);
        vkDestroyPipeline(device, pipelines.offscreen, nullptr);
        vkDestroyPipeline(device, pipelines.offscreenSampleShading, nullptr);
        vkDestroyPipeline(device, pipelines.debug, nullptr);

        vkDestroyPipelineLayout(device, pipelineLayouts.deferred, nullptr);
        vkDestroyPipelineLayout(device, pipelineLayouts.offscreen, nullptr);

        vkDestroyDescriptorSetLayout(device, descriptorSetLayout, nullptr);

        // Meshes
        models.model.destroy();
        models.floor.destroy();

        // Uniform buffers
        uniformBuffers.vsOffscreen.destroy();
        uniformBuffers.vsFullScreen.destroy();
        uniformBuffers.fsLights.destroy();

        vkFreeCommandBuffers(device, cmdPool, 1, &offScreenCmdBuffer);

        vkDestroyRenderPass(device, offScreenFrameBuf.renderPass, nullptr);

        textures.model.colorMap.destroy();
        textures.model.normalMap.destroy();
        textures.floor.colorMap.destroy();
        textures.floor.normalMap.destroy();

        vkDestroySemaphore(device, offscreenSemaphore, nullptr);
    }

    // Enable physical device features required for this example
    virtual void getEnabledFeatures()
    {
        // Enable sample rate shading if supported
        if (deviceFeatures.sampleRateShading) {
            enabledFeatures.sampleRateShading = VK_TRUE;
        }
        // Enable anisotropic filtering if supported
        if (deviceFeatures.samplerAnisotropy) {
            enabledFeatures.samplerAnisotropy = VK_TRUE;
        }
        // Enable texture compression
        if (deviceFeatures.textureCompressionBC) {
            enabledFeatures.textureCompressionBC = VK_TRUE;
        }
        else if (deviceFeatures.textureCompressionASTC_LDR) {
            enabledFeatures.textureCompressionASTC_LDR = VK_TRUE;
        }
        else if (deviceFeatures.textureCompressionETC2) {
            enabledFeatures.textureCompressionETC2 = VK_TRUE;
        }
    }

    // Create a frame buffer attachment
    void createAttachment(
        VkFormat format,
        VkImageUsageFlagBits usage,
        FrameBufferAttachment *attachment)
    {
        VkImageAspectFlags aspectMask = 0;
        VkImageLayout imageLayout;

        attachment->format = format;

        if (usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
        {
            aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        }
        if (usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT)
        {
            aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
            imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
        }

        assert(aspectMask > 0);

        VkImageCreateInfo image = vks::initializers::imageCreateInfo();
        image.imageType = VK_IMAGE_TYPE_2D;
        image.format = format;
        image.extent.width = offScreenFrameBuf.width;
        image.extent.height = offScreenFrameBuf.height;
        image.extent.depth = 1;
        image.mipLevels = 1;
        image.arrayLayers = 1;
        image.samples = sampleCount;
        image.tiling = VK_IMAGE_TILING_OPTIMAL;
        image.usage = usage | VK_IMAGE_USAGE_SAMPLED_BIT;

        VkMemoryAllocateInfo memAlloc = vks::initializers::memoryAllocateInfo();
        VkMemoryRequirements memReqs;

        VK_CHECK_RESULT(vkCreateImage(device, &image, nullptr, &attachment->image));
        vkGetImageMemoryRequirements(device, attachment->image, &memReqs);
        memAlloc.allocationSize = memReqs.size;
        memAlloc.memoryTypeIndex = vulkanDevice->getMemoryType(memReqs.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
        VK_CHECK_RESULT(vkAllocateMemory(device, &memAlloc, nullptr, &attachment->mem));
        VK_CHECK_RESULT(vkBindImageMemory(device, attachment->image, attachment->mem, 0));

        VkImageViewCreateInfo imageView = vks::initializers::imageViewCreateInfo();
        imageView.viewType = VK_IMAGE_VIEW_TYPE_2D;
        imageView.format = format;
        imageView.subresourceRange = {};
        imageView.subresourceRange.aspectMask = aspectMask;
        imageView.subresourceRange.baseMipLevel = 0;
        imageView.subresourceRange.levelCount = 1;
        imageView.subresourceRange.baseArrayLayer = 0;
        imageView.subresourceRange.layerCount = 1;
        imageView.image = attachment->image;
        VK_CHECK_RESULT(vkCreateImageView(device, &imageView, nullptr, &attachment->view));
    }

    // Prepare a new framebuffer for offscreen rendering
    // The contents of this framebuffer are sampled (and resolved) in the deferred composition pass
    void prepareOffscreenFramebuffer()
    {
        offScreenFrameBuf.width = this->width;
        offScreenFrameBuf.height = this->height;

        //offScreenFrameBuf.width = FB_DIM;
        //offScreenFrameBuf.height = FB_DIM;

        // Color attachments

        // (World space) Positions
        createAttachment(
            VK_FORMAT_R16G16B16A16_SFLOAT,
            VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
            &offScreenFrameBuf.position);

        // (World space) Normals
        createAttachment(
            VK_FORMAT_R16G16B16A16_SFLOAT,
            VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
            &offScreenFrameBuf.normal);

        // Albedo (color)
        createAttachment(
            VK_FORMAT_R8G8B8A8_UNORM,
            VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
            &offScreenFrameBuf.albedo);

        // Depth attachment

        // Find a suitable depth format
        VkFormat attDepthFormat;
        VkBool32 validDepthFormat = vks::tools::getSupportedDepthFormat(physicalDevice, &attDepthFormat);
        assert(validDepthFormat);

        createAttachment(
            attDepthFormat,
            VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
            &offScreenFrameBuf.depth);

        // Set up separate renderpass with references
        // to the color and depth attachments

        std::array<VkAttachmentDescription, 4> attachmentDescs = {};

        // Init attachment properties
        for (uint32_t i = 0; i < 4; ++i)
        {
            attachmentDescs[i].samples = sampleCount;
            attachmentDescs[i].loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
            attachmentDescs[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
            attachmentDescs[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
            attachmentDescs[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
            if (i == 3)
            {
                attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
            }
            else
            {
                attachmentDescs[i].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
                attachmentDescs[i].finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            }
        }

        // Formats
        attachmentDescs[0].format = offScreenFrameBuf.position.format;
        attachmentDescs[1].format = offScreenFrameBuf.normal.format;
        attachmentDescs[2].format = offScreenFrameBuf.albedo.format;
        attachmentDescs[3].format = offScreenFrameBuf.depth.format;

        std::vector<VkAttachmentReference> colorReferences;
        colorReferences.push_back({ 0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL });
        colorReferences.push_back({ 1, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL });
        colorReferences.push_back({ 2, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL });

        VkAttachmentReference depthReference = {};
        depthReference.attachment = 3;
        depthReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        VkSubpassDescription subpass = {};
        subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
        subpass.pColorAttachments = colorReferences.data();
        subpass.colorAttachmentCount = static_cast<uint32_t>(colorReferences.size());
        subpass.pDepthStencilAttachment = &depthReference;

        // Use subpass dependencies for attachment layout transitions
        std::array<VkSubpassDependency, 2> dependencies;

        dependencies[0].srcSubpass = VK_SUBPASS_EXTERNAL;
        dependencies[0].dstSubpass = 0;
        dependencies[0].srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
        dependencies[0].dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        dependencies[0].srcAccessMask = VK_ACCESS_MEMORY_READ_BIT;
        dependencies[0].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        dependencies[0].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;

        dependencies[1].srcSubpass = 0;
        dependencies[1].dstSubpass = VK_SUBPASS_EXTERNAL;
        dependencies[1].srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        dependencies[1].dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
        dependencies[1].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        dependencies[1].dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
        dependencies[1].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;

        VkRenderPassCreateInfo renderPassInfo = {};
        renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
        renderPassInfo.pAttachments = attachmentDescs.data();
        renderPassInfo.attachmentCount = static_cast<uint32_t>(attachmentDescs.size());
        renderPassInfo.subpassCount = 1;
        renderPassInfo.pSubpasses = &subpass;
        renderPassInfo.dependencyCount = 2;
        renderPassInfo.pDependencies = dependencies.data();

        VK_CHECK_RESULT(vkCreateRenderPass(device, &renderPassInfo, nullptr, &offScreenFrameBuf.renderPass));

        std::array<VkImageView, 4> attachments;
        attachments[0] = offScreenFrameBuf.position.view;
        attachments[1] = offScreenFrameBuf.normal.view;
        attachments[2] = offScreenFrameBuf.albedo.view;
        attachments[3] = offScreenFrameBuf.depth.view;

        VkFramebufferCreateInfo fbufCreateInfo = {};
        fbufCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
        fbufCreateInfo.pNext = NULL;
        fbufCreateInfo.renderPass = offScreenFrameBuf.renderPass;
        fbufCreateInfo.pAttachments = attachments.data();
        fbufCreateInfo.attachmentCount = static_cast<uint32_t>(attachments.size());
        fbufCreateInfo.width = offScreenFrameBuf.width;
        fbufCreateInfo.height = offScreenFrameBuf.height;
        fbufCreateInfo.layers = 1;
        VK_CHECK_RESULT(vkCreateFramebuffer(device, &fbufCreateInfo, nullptr, &offScreenFrameBuf.frameBuffer));

        // Create sampler to sample from the color attachments
        VkSamplerCreateInfo sampler = vks::initializers::samplerCreateInfo();
        sampler.magFilter = VK_FILTER_NEAREST;
        sampler.minFilter = VK_FILTER_NEAREST;
        sampler.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
        sampler.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        sampler.addressModeV = sampler.addressModeU;
        sampler.addressModeW = sampler.addressModeU;
        sampler.mipLodBias = 0.0f;
        sampler.maxAnisotropy = 1.0f;
        sampler.minLod = 0.0f;
        sampler.maxLod = 1.0f;
        sampler.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
        VK_CHECK_RESULT(vkCreateSampler(device, &sampler, nullptr, &colorSampler));
    }

    // Build command buffer for rendering the scene to the offscreen frame buffer attachments
    void buildDeferredCommandBuffer()
    {
        if (offScreenCmdBuffer == VK_NULL_HANDLE) {
            offScreenCmdBuffer = VulkanExampleBase::createCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY, false);
        }

        // Create a semaphore used to synchronize offscreen rendering and usage
        if (offscreenSemaphore == VK_NULL_HANDLE) {
            VkSemaphoreCreateInfo semaphoreCreateInfo = vks::initializers::semaphoreCreateInfo();
            VK_CHECK_RESULT(vkCreateSemaphore(device, &semaphoreCreateInfo, nullptr, &offscreenSemaphore));
        }

        VkCommandBufferBeginInfo cmdBufInfo = vks::initializers::commandBufferBeginInfo();

        // Clear values for all attachments written in the fragment shader
        std::array<VkClearValue, 4> clearValues;
        clearValues[0].color = clearValues[1].color = { { 0.0f, 0.0f, 0.0f, 0.0f } };
        clearValues[2].color = { { 0.0f, 0.0f, 0.0f, 0.0f } };
        clearValues[3].depthStencil = { 1.0f, 0 };

        VkRenderPassBeginInfo renderPassBeginInfo = vks::initializers::renderPassBeginInfo();
        renderPassBeginInfo.renderPass = offScreenFrameBuf.renderPass;
        renderPassBeginInfo.framebuffer = offScreenFrameBuf.frameBuffer;
        renderPassBeginInfo.renderArea.extent.width = offScreenFrameBuf.width;
        renderPassBeginInfo.renderArea.extent.height = offScreenFrameBuf.height;
        renderPassBeginInfo.clearValueCount = static_cast<uint32_t>(clearValues.size());
        renderPassBeginInfo.pClearValues = clearValues.data();

        VK_CHECK_RESULT(vkBeginCommandBuffer(offScreenCmdBuffer, &cmdBufInfo));

        vkCmdBeginRenderPass(offScreenCmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);

        VkViewport viewport = vks::initializers::viewport((float)offScreenFrameBuf.width, (float)offScreenFrameBuf.height, 0.0f, 1.0f);
        vkCmdSetViewport(offScreenCmdBuffer, 0, 1, &viewport);

        VkRect2D scissor = vks::initializers::rect2D(offScreenFrameBuf.width, offScreenFrameBuf.height, 0, 0);
        vkCmdSetScissor(offScreenCmdBuffer, 0, 1, &scissor);

        vkCmdBindPipeline(offScreenCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, useSampleShading ? pipelines.offscreenSampleShading : pipelines.offscreen);

        VkDeviceSize offsets[1] = { 0 };

        // Background
        vkCmdBindDescriptorSets(offScreenCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayouts.offscreen, 0, 1, &descriptorSets.floor, 0, NULL);
        vkCmdBindVertexBuffers(offScreenCmdBuffer, VERTEX_BUFFER_BIND_ID, 1, &models.floor.vertices.buffer, offsets);
        vkCmdBindIndexBuffer(offScreenCmdBuffer, models.floor.indices.buffer, 0, VK_INDEX_TYPE_UINT32);
        vkCmdDrawIndexed(offScreenCmdBuffer, models.floor.indexCount, 1, 0, 0, 0);

        // Object
        vkCmdBindDescriptorSets(offScreenCmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayouts.offscreen, 0, 1, &descriptorSets.model, 0, NULL);
        vkCmdBindVertexBuffers(offScreenCmdBuffer, VERTEX_BUFFER_BIND_ID, 1, &models.model.vertices.buffer, offsets);
        vkCmdBindIndexBuffer(offScreenCmdBuffer, models.model.indices.buffer, 0, VK_INDEX_TYPE_UINT32);
        vkCmdDrawIndexed(offScreenCmdBuffer, models.model.indexCount, 3, 0, 0, 0);

        vkCmdEndRenderPass(offScreenCmdBuffer);

        VK_CHECK_RESULT(vkEndCommandBuffer(offScreenCmdBuffer));
    }

    void buildCommandBuffers()
    {
        VkCommandBufferBeginInfo cmdBufInfo = vks::initializers::commandBufferBeginInfo();

        VkClearValue clearValues[2];
        clearValues[0].color = { { 1.0f, 1.0f, 1.0f, 0.0f } };
        clearValues[1].depthStencil = { 1.0f, 0 };

        VkRenderPassBeginInfo renderPassBeginInfo = vks::initializers::renderPassBeginInfo();
        renderPassBeginInfo.renderPass = renderPass;
        renderPassBeginInfo.renderArea.offset.x = 0;
        renderPassBeginInfo.renderArea.offset.y = 0;
        renderPassBeginInfo.renderArea.extent.width = width;
        renderPassBeginInfo.renderArea.extent.height = height;
        renderPassBeginInfo.clearValueCount = 2;
        renderPassBeginInfo.pClearValues = clearValues;

        for (int32_t i = 0; i < drawCmdBuffers.size(); ++i)
        {
            // Set target frame buffer
            renderPassBeginInfo.framebuffer = frameBuffers[i];

            VK_CHECK_RESULT(vkBeginCommandBuffer(drawCmdBuffers[i], &cmdBufInfo));

            vkCmdBeginRenderPass(drawCmdBuffers[i], &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);

            VkViewport viewport = vks::initializers::viewport((float)width, (float)height, 0.0f, 1.0f);
            vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);

            VkRect2D scissor = vks::initializers::rect2D(width, height, 0, 0);
            vkCmdSetScissor(drawCmdBuffers[i], 0, 1, &scissor);

            vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayouts.deferred, 0, 1, &descriptorSet, 0, NULL);

            if (debugDisplay)
            {
                vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.debug);
                vkCmdDraw(drawCmdBuffers[i], 3, 1, 0, 0);
                // Move viewport to display final composition in lower right corner
                viewport.x = viewport.width * 0.5f;
                viewport.y = viewport.height * 0.5f;
                viewport.width = (float)width * 0.5f;
                viewport.height = (float)height * 0.5f;
                vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);
            }

            camera.updateAspectRatio((float)viewport.width / (float)viewport.height);

            // Final composition as full screen quad
            vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, useMSAA ? pipelines.deferred : pipelines.deferredNoMSAA);
            vkCmdDraw(drawCmdBuffers[i], 3, 1, 0, 0);

            drawUI(drawCmdBuffers[i]);

            vkCmdEndRenderPass(drawCmdBuffers[i]);

            VK_CHECK_RESULT(vkEndCommandBuffer(drawCmdBuffers[i]));
        }
    }

    void loadAssets()
    {
        models.model.loadFromFile(getAssetPath() + "models/armor/armor.dae", vertexLayout, 1.0f, vulkanDevice, queue);

        vks::ModelCreateInfo modelCreateInfo;
        modelCreateInfo.scale = glm::vec3(15.0f);
        modelCreateInfo.uvscale = glm::vec2(8.0f, 8.0f);
        modelCreateInfo.center = glm::vec3(0.0f, 2.3f, 0.0f);
        models.floor.loadFromFile(getAssetPath() + "models/openbox.dae", vertexLayout, &modelCreateInfo, vulkanDevice, queue);

        // Textures
        std::string texFormatSuffix;
        VkFormat texFormat;
        // Get supported compressed texture format
        if (vulkanDevice->features.textureCompressionBC) {
            texFormatSuffix = "_bc3_unorm";
            texFormat = VK_FORMAT_BC3_UNORM_BLOCK;
        }
        else if (vulkanDevice->features.textureCompressionASTC_LDR) {
            texFormatSuffix = "_astc_8x8_unorm";
            texFormat = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
        }
        else if (vulkanDevice->features.textureCompressionETC2) {
            texFormatSuffix = "_etc2_unorm";
            texFormat = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
        }
        else {
            vks::tools::exitFatal("Device does not support any compressed texture format!", VK_ERROR_FEATURE_NOT_PRESENT);
        }

        textures.model.colorMap.loadFromFile(getAssetPath() + "models/armor/color" + texFormatSuffix + ".ktx", texFormat, vulkanDevice, queue);
        textures.model.normalMap.loadFromFile(getAssetPath() + "models/armor/normal" + texFormatSuffix + ".ktx", texFormat, vulkanDevice, queue);
        textures.floor.colorMap.loadFromFile(getAssetPath() + "textures/stonefloor02_color" + texFormatSuffix + ".ktx", texFormat, vulkanDevice, queue);
        textures.floor.normalMap.loadFromFile(getAssetPath() + "textures/stonefloor02_normal" + texFormatSuffix + ".ktx", texFormat, vulkanDevice, queue);
    }

    void setupVertexDescriptions()
    {
        // Binding description
        vertices.bindingDescriptions.resize(1);
        vertices.bindingDescriptions[0] =
            vks::initializers::vertexInputBindingDescription(
                VERTEX_BUFFER_BIND_ID,
                vertexLayout.stride(),
                VK_VERTEX_INPUT_RATE_VERTEX);

        // Attribute descriptions
        vertices.attributeDescriptions.resize(5);
        // Location 0: Position
        vertices.attributeDescriptions[0] =
            vks::initializers::vertexInputAttributeDescription(
                VERTEX_BUFFER_BIND_ID,
                0,
                VK_FORMAT_R32G32B32_SFLOAT,
                0);
        // Location 1: Texture coordinates
        vertices.attributeDescriptions[1] =
            vks::initializers::vertexInputAttributeDescription(
                VERTEX_BUFFER_BIND_ID,
                1,
                VK_FORMAT_R32G32_SFLOAT,
                sizeof(float) * 3);
        // Location 2: Color
        vertices.attributeDescriptions[2] =
            vks::initializers::vertexInputAttributeDescription(
                VERTEX_BUFFER_BIND_ID,
                2,
                VK_FORMAT_R32G32B32_SFLOAT,
                sizeof(float) * 5);
        // Location 3: Normal
        vertices.attributeDescriptions[3] =
            vks::initializers::vertexInputAttributeDescription(
                VERTEX_BUFFER_BIND_ID,
                3,
                VK_FORMAT_R32G32B32_SFLOAT,
                sizeof(float) * 8);
        // Location 4: Tangent
        vertices.attributeDescriptions[4] =
            vks::initializers::vertexInputAttributeDescription(
                VERTEX_BUFFER_BIND_ID,
                4,
                VK_FORMAT_R32G32B32_SFLOAT,
                sizeof(float) * 11);

        vertices.inputState = vks::initializers::pipelineVertexInputStateCreateInfo();
        vertices.inputState.vertexBindingDescriptionCount = static_cast<uint32_t>(vertices.bindingDescriptions.size());
        vertices.inputState.pVertexBindingDescriptions = vertices.bindingDescriptions.data();
        vertices.inputState.vertexAttributeDescriptionCount = static_cast<uint32_t>(vertices.attributeDescriptions.size());
        vertices.inputState.pVertexAttributeDescriptions = vertices.attributeDescriptions.data();
    }

    void setupDescriptorPool()
    {
        std::vector<VkDescriptorPoolSize> poolSizes =
        {
            vks::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 8),
            vks::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 9)
        };

        VkDescriptorPoolCreateInfo descriptorPoolInfo =
            vks::initializers::descriptorPoolCreateInfo(
                static_cast<uint32_t>(poolSizes.size()),
                poolSizes.data(),
                3);

        VK_CHECK_RESULT(vkCreateDescriptorPool(device, &descriptorPoolInfo, nullptr, &descriptorPool));
    }

    void setupDescriptorSetLayout()
    {
        // Deferred shading layout
        std::vector<VkDescriptorSetLayoutBinding> setLayoutBindings =
        {
            // Binding 0 : Vertex shader uniform buffer
            vks::initializers::descriptorSetLayoutBinding(
                VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                VK_SHADER_STAGE_VERTEX_BIT,
                0),
            // Binding 1 : Position texture target / Scene colormap
            vks::initializers::descriptorSetLayoutBinding(
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                VK_SHADER_STAGE_FRAGMENT_BIT,
                1),
            // Binding 2 : Normals texture target
            vks::initializers::descriptorSetLayoutBinding(
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                VK_SHADER_STAGE_FRAGMENT_BIT,
                2),
            // Binding 3 : Albedo texture target
            vks::initializers::descriptorSetLayoutBinding(
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                VK_SHADER_STAGE_FRAGMENT_BIT,
                3),
            // Binding 4 : Fragment shader uniform buffer
            vks::initializers::descriptorSetLayoutBinding(
                VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                VK_SHADER_STAGE_FRAGMENT_BIT,
                4),
        };

        VkDescriptorSetLayoutCreateInfo descriptorLayout =
            vks::initializers::descriptorSetLayoutCreateInfo(
                setLayoutBindings.data(),
                static_cast<uint32_t>(setLayoutBindings.size()));

        VK_CHECK_RESULT(vkCreateDescriptorSetLayout(device, &descriptorLayout, nullptr, &descriptorSetLayout));

        VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo =
            vks::initializers::pipelineLayoutCreateInfo(
                &descriptorSetLayout,
                1);

        VK_CHECK_RESULT(vkCreatePipelineLayout(device, &pPipelineLayoutCreateInfo, nullptr, &pipelineLayouts.deferred));

        // Offscreen (scene) rendering pipeline layout
        VK_CHECK_RESULT(vkCreatePipelineLayout(device, &pPipelineLayoutCreateInfo, nullptr, &pipelineLayouts.offscreen));
    }

    void setupDescriptorSet()
    {
        std::vector<VkWriteDescriptorSet> writeDescriptorSets;

        // Textured quad descriptor set
        VkDescriptorSetAllocateInfo allocInfo =
            vks::initializers::descriptorSetAllocateInfo(
                descriptorPool,
                &descriptorSetLayout,
                1);

        VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSet));

        // Image descriptors for the offscreen color attachments
        VkDescriptorImageInfo texDescriptorPosition =
            vks::initializers::descriptorImageInfo(
                colorSampler,
                offScreenFrameBuf.position.view,
                VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

        VkDescriptorImageInfo texDescriptorNormal =
            vks::initializers::descriptorImageInfo(
                colorSampler,
                offScreenFrameBuf.normal.view,
                VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

        VkDescriptorImageInfo texDescriptorAlbedo =
            vks::initializers::descriptorImageInfo(
                colorSampler,
                offScreenFrameBuf.albedo.view,
                VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

        writeDescriptorSets = {
            // Binding 0 : Vertex shader uniform buffer
            vks::initializers::writeDescriptorSet(
                descriptorSet,
                VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                0,
                &uniformBuffers.vsFullScreen.descriptor),
            // Binding 1 : Position texture target
            vks::initializers::writeDescriptorSet(
                descriptorSet,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                1,
                &texDescriptorPosition),
            // Binding 2 : Normals texture target
            vks::initializers::writeDescriptorSet(
                descriptorSet,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                2,
                &texDescriptorNormal),
            // Binding 3 : Albedo texture target
            vks::initializers::writeDescriptorSet(
                descriptorSet,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                3,
                &texDescriptorAlbedo),
            // Binding 4 : Fragment shader uniform buffer
            vks::initializers::writeDescriptorSet(
                descriptorSet,
                VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                4,
                &uniformBuffers.fsLights.descriptor),
        };

        vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, NULL);

        // Offscreen (scene)

        // Model
        VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.model));
        writeDescriptorSets =
        {
            // Binding 0: Vertex shader uniform buffer
            vks::initializers::writeDescriptorSet(
                descriptorSets.model,
                VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                0,
                &uniformBuffers.vsOffscreen.descriptor),
            // Binding 1: Color map
            vks::initializers::writeDescriptorSet(
                descriptorSets.model,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                1,
                &textures.model.colorMap.descriptor),
            // Binding 2: Normal map
            vks::initializers::writeDescriptorSet(
                descriptorSets.model,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                2,
                &textures.model.normalMap.descriptor)
        };
        vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, NULL);

        // Background
        VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSets.floor));
        writeDescriptorSets =
        {
            // Binding 0: Vertex shader uniform buffer
            vks::initializers::writeDescriptorSet(
                descriptorSets.floor,
                VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                0,
                &uniformBuffers.vsOffscreen.descriptor),
            // Binding 1: Color map
            vks::initializers::writeDescriptorSet(
                descriptorSets.floor,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                1,
                &textures.floor.colorMap.descriptor),
            // Binding 2: Normal map
            vks::initializers::writeDescriptorSet(
                descriptorSets.floor,
                VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
                2,
                &textures.floor.normalMap.descriptor)
        };
        vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, NULL);
    }

    void preparePipelines()
    {
        VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
            vks::initializers::pipelineInputAssemblyStateCreateInfo(
                VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
                0,
                VK_FALSE);

        VkPipelineRasterizationStateCreateInfo rasterizationState =
            vks::initializers::pipelineRasterizationStateCreateInfo(
                VK_POLYGON_MODE_FILL,
                VK_CULL_MODE_BACK_BIT,
                VK_FRONT_FACE_CLOCKWISE,
                0);

        VkPipelineColorBlendAttachmentState blendAttachmentState =
            vks::initializers::pipelineColorBlendAttachmentState(
                0xf,
                VK_FALSE);

        VkPipelineColorBlendStateCreateInfo colorBlendState =
            vks::initializers::pipelineColorBlendStateCreateInfo(
                1,
                &blendAttachmentState);

        VkPipelineDepthStencilStateCreateInfo depthStencilState =
            vks::initializers::pipelineDepthStencilStateCreateInfo(
                VK_TRUE,
                VK_TRUE,
                VK_COMPARE_OP_LESS_OR_EQUAL);

        VkPipelineViewportStateCreateInfo viewportState =
            vks::initializers::pipelineViewportStateCreateInfo(1, 1, 0);

        VkPipelineMultisampleStateCreateInfo multisampleState =
            vks::initializers::pipelineMultisampleStateCreateInfo(
                VK_SAMPLE_COUNT_1_BIT,
                0);

        std::vector<VkDynamicState> dynamicStateEnables = {
            VK_DYNAMIC_STATE_VIEWPORT,
            VK_DYNAMIC_STATE_SCISSOR
        };
        VkPipelineDynamicStateCreateInfo dynamicState =
            vks::initializers::pipelineDynamicStateCreateInfo(
                dynamicStateEnables.data(),
                static_cast<uint32_t>(dynamicStateEnables.size()),
                0);

        // Final fullscreen pass pipeline
        std::array<VkPipelineShaderStageCreateInfo, 2> shaderStages;

        VkGraphicsPipelineCreateInfo pipelineCreateInfo =
            vks::initializers::pipelineCreateInfo(
                pipelineLayouts.deferred,
                renderPass,
                0);

        pipelineCreateInfo.pInputAssemblyState = &inputAssemblyState;
        pipelineCreateInfo.pRasterizationState = &rasterizationState;
        pipelineCreateInfo.pColorBlendState = &colorBlendState;
        pipelineCreateInfo.pMultisampleState = &multisampleState;
        pipelineCreateInfo.pViewportState = &viewportState;
        pipelineCreateInfo.pDepthStencilState = &depthStencilState;
        pipelineCreateInfo.pDynamicState = &dynamicState;
        pipelineCreateInfo.stageCount = static_cast<uint32_t>(shaderStages.size());
        pipelineCreateInfo.pStages = shaderStages.data();

        // Deferred

        // Empty vertex input state, quads are generated by the vertex shader
        VkPipelineVertexInputStateCreateInfo emptyInputState = vks::initializers::pipelineVertexInputStateCreateInfo();
        pipelineCreateInfo.pVertexInputState = &emptyInputState;
        pipelineCreateInfo.layout = pipelineLayouts.deferred;

        // Use specialization constants to pass number of samples to the shader (used for MSAA resolve)
        VkSpecializationMapEntry specializationEntry{};
        specializationEntry.constantID = 0;
        specializationEntry.offset = 0;
        specializationEntry.size = sizeof(uint32_t);

        uint32_t specializationData = sampleCount;

        VkSpecializationInfo specializationInfo;
        specializationInfo.mapEntryCount = 1;
        specializationInfo.pMapEntries = &specializationEntry;
        specializationInfo.dataSize = sizeof(specializationData);
        specializationInfo.pData = &specializationData;

        // With MSAA
        shaderStages[0] = loadShader(getAssetPath() + "shaders/deferredmultisampling/deferred.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
        shaderStages[1] = loadShader(getAssetPath() + "shaders/deferredmultisampling/deferred.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
        shaderStages[1].pSpecializationInfo = &specializationInfo;
        VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.deferred));

        // No MSAA (1 sample)
        specializationData = 1;
        VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.deferredNoMSAA));

        // Debug display pipeline
        specializationData = sampleCount;
        shaderStages[0] = loadShader(getAssetPath() + "shaders/deferredmultisampling/debug.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
        shaderStages[1] = loadShader(getAssetPath() + "shaders/deferredmultisampling/debug.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
        shaderStages[1].pSpecializationInfo = &specializationInfo;
        VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.debug));

        // Offscreen scene rendering pipeline
        pipelineCreateInfo.pVertexInputState = &vertices.inputState;

        shaderStages[0] = loadShader(getAssetPath() + "shaders/deferredmultisampling/mrt.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
        shaderStages[1] = loadShader(getAssetPath() + "shaders/deferredmultisampling/mrt.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);

        //rasterizationState.polygonMode = VK_POLYGON_MODE_LINE;
        //rasterizationState.lineWidth = 2.0f;
        multisampleState.rasterizationSamples = sampleCount;
        multisampleState.alphaToCoverageEnable = VK_TRUE;

        // Separate render pass
        pipelineCreateInfo.renderPass = offScreenFrameBuf.renderPass;

        // Separate layout
        pipelineCreateInfo.layout = pipelineLayouts.offscreen;

        // Blend attachment states required for all color attachments
        // This is important, as color write mask will otherwise be 0x0 and you
        // won't see anything rendered to the attachment
        std::array<VkPipelineColorBlendAttachmentState, 3> blendAttachmentStates = {
            vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE),
            vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE),
            vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE)
        };

        colorBlendState.attachmentCount = static_cast<uint32_t>(blendAttachmentStates.size());
        colorBlendState.pAttachments = blendAttachmentStates.data();

        VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.offscreen));

        multisampleState.sampleShadingEnable = VK_TRUE;
        multisampleState.minSampleShading = 0.25f;
        VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.offscreenSampleShading));
    }

    // Prepare and initialize uniform buffer containing shader uniforms
    void prepareUniformBuffers()
    {
        // Fullscreen vertex shader
        VK_CHECK_RESULT(vulkanDevice->createBuffer(
            VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
            &uniformBuffers.vsFullScreen,
            sizeof(uboVS)));

        // Deferred vertex shader
        VK_CHECK_RESULT(vulkanDevice->createBuffer(
            VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
            &uniformBuffers.vsOffscreen,
            sizeof(uboOffscreenVS)));

        // Deferred fragment shader
        VK_CHECK_RESULT(vulkanDevice->createBuffer(
            VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
            &uniformBuffers.fsLights,
            sizeof(uboFragmentLights)));

        // Map persistent
        VK_CHECK_RESULT(uniformBuffers.vsFullScreen.map());
        VK_CHECK_RESULT(uniformBuffers.vsOffscreen.map());
        VK_CHECK_RESULT(uniformBuffers.fsLights.map());

        // Init some values
        uboOffscreenVS.instancePos[0] = glm::vec4(0.0f);
        uboOffscreenVS.instancePos[1] = glm::vec4(-4.0f, 0.0, -4.0f, 0.0f);
        uboOffscreenVS.instancePos[2] = glm::vec4(4.0f, 0.0, -4.0f, 0.0f);

        // Update
        updateUniformBuffersScreen();
        updateUniformBufferDeferredMatrices();
        updateUniformBufferDeferredLights();
    }

    void updateUniformBuffersScreen()
    {
        if (debugDisplay)
        {
            uboVS.projection = glm::ortho(0.0f, 2.0f, 0.0f, 2.0f, -1.0f, 1.0f);
        }
        else
        {
            uboVS.projection = glm::ortho(0.0f, 1.0f, 0.0f, 1.0f, -1.0f, 1.0f);
        }
        uboVS.model = glm::mat4(1.0f);

        memcpy(uniformBuffers.vsFullScreen.mapped, &uboVS, sizeof(uboVS));
    }

    void updateUniformBufferDeferredMatrices()
    {
        uboOffscreenVS.projection = camera.matrices.perspective;
        uboOffscreenVS.view = camera.matrices.view;
        uboOffscreenVS.model = glm::mat4(1.0f);
        memcpy(uniformBuffers.vsOffscreen.mapped, &uboOffscreenVS, sizeof(uboOffscreenVS));
    }

    // Update fragment shader light position uniform block
    void updateUniformBufferDeferredLights()
    {
        // White
        uboFragmentLights.lights[0].position = glm::vec4(0.0f, 0.0f, 1.0f, 0.0f);
        uboFragmentLights.lights[0].color = glm::vec3(1.5f);
        uboFragmentLights.lights[0].radius = 15.0f * 0.25f;
        // Red
        uboFragmentLights.lights[1].position = glm::vec4(-2.0f, 0.0f, 0.0f, 0.0f);
        uboFragmentLights.lights[1].color = glm::vec3(1.0f, 0.0f, 0.0f);
        uboFragmentLights.lights[1].radius = 15.0f;
        // Blue
        uboFragmentLights.lights[2].position = glm::vec4(2.0f, 1.0f, 0.0f, 0.0f);
        uboFragmentLights.lights[2].color = glm::vec3(0.0f, 0.0f, 2.5f);
        uboFragmentLights.lights[2].radius = 5.0f;
        // Yellow
        uboFragmentLights.lights[3].position = glm::vec4(0.0f, 0.9f, 0.5f, 0.0f);
        uboFragmentLights.lights[3].color = glm::vec3(1.0f, 1.0f, 0.0f);
        uboFragmentLights.lights[3].radius = 2.0f;
        // Green
        uboFragmentLights.lights[4].position = glm::vec4(0.0f, 0.5f, 0.0f, 0.0f);
        uboFragmentLights.lights[4].color = glm::vec3(0.0f, 1.0f, 0.2f);
        uboFragmentLights.lights[4].radius = 5.0f;
        // Yellow
        uboFragmentLights.lights[5].position = glm::vec4(0.0f, 1.0f, 0.0f, 0.0f);
        uboFragmentLights.lights[5].color = glm::vec3(1.0f, 0.7f, 0.3f);
        uboFragmentLights.lights[5].radius = 25.0f;

        uboFragmentLights.lights[0].position.x = sin(glm::radians(360.0f * timer)) * 5.0f;
        uboFragmentLights.lights[0].position.z = cos(glm::radians(360.0f * timer)) * 5.0f;

        uboFragmentLights.lights[1].position.x = -4.0f + sin(glm::radians(360.0f * timer) + 45.0f) * 2.0f;
        uboFragmentLights.lights[1].position.z = 0.0f + cos(glm::radians(360.0f * timer) + 45.0f) * 2.0f;

        uboFragmentLights.lights[2].position.x = 4.0f + sin(glm::radians(360.0f * timer)) * 2.0f;
        uboFragmentLights.lights[2].position.z = 0.0f + cos(glm::radians(360.0f * timer)) * 2.0f;

        uboFragmentLights.lights[4].position.x = 0.0f + sin(glm::radians(360.0f * timer + 90.0f)) * 5.0f;
        uboFragmentLights.lights[4].position.z = 0.0f - cos(glm::radians(360.0f * timer + 45.0f)) * 5.0f;

        uboFragmentLights.lights[5].position.x = 0.0f + sin(glm::radians(-360.0f * timer + 135.0f)) * 10.0f;
        uboFragmentLights.lights[5].position.z = 0.0f - cos(glm::radians(-360.0f * timer - 45.0f)) * 10.0f;

        // Current view position
        uboFragmentLights.viewPos = glm::vec4(camera.position, 0.0f) * glm::vec4(-1.0f, 1.0f, -1.0f, 1.0f);

        memcpy(uniformBuffers.fsLights.mapped, &uboFragmentLights, sizeof(uboFragmentLights));
    }

    void draw()
    {
        VulkanExampleBase::prepareFrame();

        // Offscreen rendering

        // Wait for swap chain presentation to finish
        submitInfo.pWaitSemaphores = &semaphores.presentComplete;
        // Signal ready with offscreen semaphore
        submitInfo.pSignalSemaphores = &offscreenSemaphore;

        // Submit work
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &offScreenCmdBuffer;
        VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));

        // Scene rendering

        // Wait for offscreen semaphore
        submitInfo.pWaitSemaphores = &offscreenSemaphore;
        // Signal ready with render complete semaphore
        submitInfo.pSignalSemaphores = &semaphores.renderComplete;

        // Submit work
        submitInfo.pCommandBuffers = &drawCmdBuffers[currentBuffer];
        VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));

        VulkanExampleBase::submitFrame();
    }

    void prepare()
    {
        VulkanExampleBase::prepare();
        sampleCount = getMaxUsableSampleCount();
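        // Note: the detected sample count is read below when the offscreen G-Buffer attachments and the MSAA pipelines are created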
        loadAssets();
        setupVertexDescriptions();
        prepareOffscreenFramebuffer();
        prepareUniformBuffers();
        setupDescriptorSetLayout();
        preparePipelines();
        setupDescriptorPool();
        setupDescriptorSet();
        buildCommandBuffers();
        buildDeferredCommandBuffer();
        prepared = true;
    }

    virtual void render()
    {
        if (!prepared)
            return;
        draw();
        updateUniformBufferDeferredLights();
    }

    virtual void viewChanged()
    {
        updateUniformBufferDeferredMatrices();
        uboFragmentLights.windowSize = glm::ivec2(width, height);
    }

    virtual void OnUpdateUIOverlay(vks::UIOverlay *overlay)
    {
        if (overlay->header("Settings")) {
            if (overlay->checkBox("Display render targets", &debugDisplay)) {
                buildCommandBuffers();
                updateUniformBuffersScreen();
            }
            if (overlay->checkBox("MSAA", &useMSAA)) {
                buildCommandBuffers();
            }
            if (vulkanDevice->features.sampleRateShading) {
                if (overlay->checkBox("Sample rate shading", &useSampleShading)) {
                    buildDeferredCommandBuffer();
                }
            }
        }
    }

    // Returns the maximum sample count usable by the device for color and depth framebuffer attachments
    VkSampleCountFlagBits getMaxUsableSampleCount()
    {
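        // Both limits are bitmasks of the supported sample counts; their intersection
        // gives the counts that are valid for every attachment of the G-Buffer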
        VkSampleCountFlags counts = deviceProperties.limits.framebufferColorSampleCounts & deviceProperties.limits.framebufferDepthSampleCounts;
        if (counts & VK_SAMPLE_COUNT_64_BIT) { return VK_SAMPLE_COUNT_64_BIT; }
        if (counts & VK_SAMPLE_COUNT_32_BIT) { return VK_SAMPLE_COUNT_32_BIT; }
        if (counts & VK_SAMPLE_COUNT_16_BIT) { return VK_SAMPLE_COUNT_16_BIT; }
        if (counts & VK_SAMPLE_COUNT_8_BIT) { return VK_SAMPLE_COUNT_8_BIT; }
        if (counts & VK_SAMPLE_COUNT_4_BIT) { return VK_SAMPLE_COUNT_4_BIT; }
        if (counts & VK_SAMPLE_COUNT_2_BIT) { return VK_SAMPLE_COUNT_2_BIT; }
        return VK_SAMPLE_COUNT_1_BIT;
    }
};

VULKAN_EXAMPLE_MAIN()