Overhauled parallax mapping example with multiple modes

saschawillems 2017-03-24 17:58:25 +01:00
parent 182ff9e72d
commit 110005b859
12 changed files with 245 additions and 475 deletions

View file

@ -1,4 +0,0 @@
glslangvalidator -V parallax.vert -o parallax.vert.spv
glslangvalidator -V parallax.frag -o parallax.frag.spv
glslangvalidator -V normalmap.vert -o normalmap.vert.spv
glslangvalidator -V normalmap.frag -o normalmap.frag.spv

View file

@ -1,50 +0,0 @@
#version 450
#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable
layout (binding = 1) uniform sampler2D sColorMap;
layout (binding = 2) uniform sampler2D sNormalHeightMap;
layout (binding = 3) uniform UBO
{
float scale;
float bias;
float lightRadius;
int usePom;
int displayNormalMap;
} ubo;
layout (location = 0) in vec2 inUV;
layout (location = 1) in vec3 inLightVec;
layout (location = 2) in vec3 inLightVecB;
layout (location = 3) in vec3 inSpecular;
layout (location = 4) in vec3 inEyeVec;
layout (location = 5) in vec3 inLightDir;
layout (location = 6) in vec3 inViewVec;
layout (location = 0) out vec4 outFragColor;
void main(void)
{
vec3 specularColor = vec3(0.0, 0.0, 0.0);
float invRadius = 1.0/ubo.lightRadius;
float ambient = 0.5;
vec3 rgb, normal;
rgb = (ubo.displayNormalMap == 0) ? texture(sColorMap, inUV).rgb : texture(sNormalHeightMap, inUV).rgb;
normal = normalize((texture(sNormalHeightMap, inUV).rgb - 0.5) * 2.0);
float distSqr = dot(inLightVecB, inLightVecB);
vec3 lVec = inLightVecB * inversesqrt(distSqr);
vec3 nvViewVec = normalize(inViewVec);
float specular = pow(clamp(dot(reflect(-nvViewVec, normal), lVec), 0.0, 1.0), 4.0);
float atten = clamp(1.0 - invRadius * sqrt(distSqr), 0.0, 1.0);
float diffuse = clamp(dot(lVec, normal), 0.0, 1.0);
outFragColor = vec4((rgb * ambient + (diffuse * rgb + 0.5 * specular * specularColor.rgb)) * atten, 1.0);
}

View file

@ -1,65 +0,0 @@
#version 450
#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable
layout (location = 0) in vec3 inPos;
layout (location = 1) in vec2 inUV;
layout (location = 2) in vec3 inNormal;
layout (location = 3) in vec3 inTangent;
layout (location = 4) in vec3 inBiTangent;
layout (binding = 0) uniform UBO
{
mat4 projection;
mat4 model;
mat4 normal;
vec4 lightPos;
vec4 cameraPos;
} ubo;
layout (location = 0) out vec2 outUV;
layout (location = 1) out vec3 outLightVec;
layout (location = 2) out vec3 outLightVecB;
layout (location = 3) out vec3 outSpecular;
layout (location = 4) out vec3 outEyeVec;
layout (location = 5) out vec3 outLightDir;
layout (location = 6) out vec3 outViewVec;
void main(void)
{
vec3 vertexPosition = vec3(ubo.model * vec4(inPos, 1.0));
outLightDir = normalize(ubo.lightPos.xyz - vertexPosition);
// Set up the (t)angent-(b)inormal-(n)ormal matrix for converting
// object coordinates into tangent space
mat3 tbnMatrix;
tbnMatrix[0] = mat3(ubo.normal) * inTangent;
tbnMatrix[1] = mat3(ubo.normal) * inBiTangent;
tbnMatrix[2] = mat3(ubo.normal) * inNormal;
outEyeVec = vec3(-vertexPosition) * tbnMatrix;
outLightVec.xyz = vec3(ubo.lightPos.xyz - vertexPosition.xyz) * tbnMatrix;
vec3 lightDist = ubo.lightPos.xyz - inPos.xyz;
outLightVecB.x = dot(inTangent.xyz, lightDist);
outLightVecB.y = dot(inBiTangent.xyz, lightDist);
outLightVecB.z = dot(inNormal, lightDist);
vec3 camPos = vec3(ubo.normal * ubo.cameraPos);
vec3 camVec = camPos - inPos.xyz;
outViewVec.x = dot(inTangent, camVec);
outViewVec.y = dot(inBiTangent, camVec);
outViewVec.z = dot(inNormal, camVec);
vec3 reflectVec = reflect(-camVec, inNormal);
vec3 outViewVec = outLightDir;
float specIntensity = pow(max(dot(reflectVec, outViewVec), 0.0), 8.0);
outSpecular = vec3(specIntensity * 0.3);
outUV = inUV;
gl_Position = ubo.projection * ubo.model * vec4(inPos, 1.0);
}

View file

@ -1,66 +1,143 @@
#version 450
#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable
layout (binding = 1) uniform sampler2D sColorMap;
layout (binding = 2) uniform sampler2D sNormalHeightMap;
layout (binding = 3) uniform UBO
{
float scale;
float bias;
float lightRadius;
int usePom;
int displayNormalMap;
float heightScale;
float parallaxBias;
float numLayers;
int mappingMode;
} ubo;
layout (location = 0) in vec2 inUV;
layout (location = 1) in vec3 inLightVec;
layout (location = 2) in vec3 inLightVecB;
layout (location = 3) in vec3 inSpecular;
layout (location = 4) in vec3 inEyeVec;
layout (location = 5) in vec3 inLightDir;
layout (location = 6) in vec3 inViewVec;
layout (location = 1) in vec3 inTangentLightPos;
layout (location = 2) in vec3 inTangentViewPos;
layout (location = 3) in vec3 inTangentFragPos;
layout (location = 0) out vec4 outFragColor;
layout (location = 0) out vec4 outColor;
vec2 parallax_uv(vec2 uv, vec3 view_dir, int type)
{
if (type == 2) {
// Parallax mapping
float depth = 1.0 - texture(sNormalHeightMap, uv).a;
vec2 p = view_dir.xy * (depth * (ubo.heightScale * 0.5) + ubo.parallaxBias) / view_dir.z;
return uv - p;
} else {
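// Layered depth march shared by steep parallax mapping (type 3) and parallax occlusion mapping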
float layer_depth = 1.0 / ubo.numLayers;
float cur_layer_depth = 0.0;
vec2 delta_uv = view_dir.xy * ubo.heightScale / (view_dir.z * ubo.numLayers);
vec2 cur_uv = uv;
float depth_from_tex = 1.0 - texture(sNormalHeightMap, cur_uv).a;
for (int i = 0; i < 32; i++) {
cur_layer_depth += layer_depth;
cur_uv -= delta_uv;
depth_from_tex = 1.0 - texture(sNormalHeightMap, cur_uv).a;
if (depth_from_tex < cur_layer_depth) {
break;
}
}
if (type == 3) {
// Steep parallax mapping
return cur_uv;
} else {
// Parallax occlusion mapping
vec2 prev_uv = cur_uv + delta_uv;
float next = depth_from_tex - cur_layer_depth;
float prev = 1.0 - texture(sNormalHeightMap, prev_uv).a - cur_layer_depth + layer_depth;
float weight = next / (next - prev);
return mix(cur_uv, prev_uv, weight);
}
}
}
vec2 parallaxMapping(vec2 uv, vec3 viewDir)
{
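// Single-sample parallax: read the depth (inverted height from the alpha channel) once
// and shift the UVs along the tangent-space view direction using heightScale and parallaxBias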
float height = 1.0 - texture(sNormalHeightMap, uv).a;
vec2 p = viewDir.xy * (height * (ubo.heightScale * 0.5) + ubo.parallaxBias) / viewDir.z;
return uv - p;
}
vec2 steepParallaxMapping(vec2 uv, vec3 viewDir)
{
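// Steep parallax: march along the view direction in numLayers fixed depth steps
// and stop at the first layer whose depth exceeds the depth sampled from the height map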
float layerDepth = 1.0 / ubo.numLayers;
float currLayerDepth = 0.0;
vec2 deltaUV = viewDir.xy * ubo.heightScale / (viewDir.z * ubo.numLayers);
vec2 currUV = uv;
float height = 1.0 - texture(sNormalHeightMap, currUV).a;
for (int i = 0; i < ubo.numLayers; i++) {
currLayerDepth += layerDepth;
currUV -= deltaUV;
height = 1.0 - texture(sNormalHeightMap, currUV).a;
if (height < currLayerDepth) {
break;
}
}
return currUV;
}
vec2 parallaxOcclusionMapping(vec2 uv, vec3 viewDir)
{
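// Same layered march as steep parallax mapping, then interpolate between the last
// two steps based on their depth differences to approximate the true intersection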
float layerDepth = 1.0 / ubo.numLayers;
float currLayerDepth = 0.0;
vec2 deltaUV = viewDir.xy * ubo.heightScale / (viewDir.z * ubo.numLayers);
vec2 currUV = uv;
float height = 1.0 - texture(sNormalHeightMap, currUV).a;
for (int i = 0; i < ubo.numLayers; i++) {
currLayerDepth += layerDepth;
currUV -= deltaUV;
height = 1.0 - texture(sNormalHeightMap, currUV).a;
if (height < currLayerDepth) {
break;
}
}
vec2 prevUV = currUV + deltaUV;
float nextDepth = height - currLayerDepth;
float prevDepth = 1.0 - texture(sNormalHeightMap, prevUV).a - currLayerDepth + layerDepth;
return mix(currUV, prevUV, nextDepth / (nextDepth - prevDepth));
}
void main(void)
{
vec3 specularColor = vec3(0.0, 0.0, 0.0);
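// Tangent-space view direction, used by the parallax functions to offset the UVs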
vec3 V = normalize(inTangentViewPos - inTangentFragPos);
vec2 uv = inUV;
float invRadius = 1.0/ubo.lightRadius;
float ambient = 0.5;
vec3 rgb, normal, eyeVecTs;
vec2 UV = inUV;
// Get new scaled and biased texture coordinates
// Height info is stored in alpha channel of normal map
vec2 height_bump = vec2(texture(sNormalHeightMap, inUV).a * ubo.scale + ubo.bias, 0.0);
// If parallax mapping is enabled, offset texture coordinates
if (ubo.usePom == 1)
{
UV = inUV + (height_bump.x * normalize(inEyeVec).xy);
if (ubo.mappingMode == 0) {
// Color only
outColor = texture(sColorMap, inUV);
} else {
switch(ubo.mappingMode) {
case 2:
uv = parallaxMapping(inUV, V);
break;
case 3:
uv = steepParallaxMapping(inUV, V);
break;
case 4:
uv = parallaxOcclusionMapping(inUV, V);
break;
}
rgb = (ubo.displayNormalMap == 0) ? texture(sColorMap, UV).rgb : texture(sNormalHeightMap, UV).rgb;
normal = normalize((texture(sNormalHeightMap, UV).rgb - 0.5) * 2.0);
eyeVecTs = normalize(inLightVec).xyz;
height_bump.y = min(dot(normal, eyeVecTs.xyz), 1.0);
height_bump.y = pow(height_bump.y, 8.0);
float distSqr = dot(inLightVecB, inLightVecB);
vec3 lVec = inLightVecB * inversesqrt(distSqr);
vec3 nvViewVec = normalize(inViewVec);
float specular = pow(clamp(dot(reflect(-nvViewVec, normal), lVec), 0.0, 1.0), 4.0);
float atten = clamp(1.0 - invRadius * sqrt(distSqr), 0.0, 1.0);
float diffuse = clamp(dot(lVec, normal), 0.0, 1.0);
outFragColor = vec4((rgb * ambient + (diffuse * rgb + 0.5 * specular * specularColor.rgb)) * atten, 1.0);
// Discard fragments whose offset UVs fall outside the texture borders
if (uv.x < 0.0 || uv.x > 1.0 || uv.y < 0.0 || uv.y > 1.0) {
discard;
}
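// Sample the normal map at the (possibly parallax-offset) UVs and shade with a simple Blinn-Phong model in tangent space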
vec3 N = normalize(texture(sNormalHeightMap, uv).rgb * 2.0 - 1.0);
vec3 L = normalize(inTangentLightPos - inTangentFragPos);
vec3 R = reflect(-L, N);
vec3 H = normalize(L + V);
vec3 color = texture(sColorMap, uv).rgb;
vec3 ambient = 0.2 * color;
vec3 diffuse = max(dot(L, N), 0.0) * color;
vec3 specular = vec3(0.15) * pow(max(dot(N, H), 0.0), 32.0);
outColor = vec4(ambient + diffuse + specular, 1.0f);
}
}

View file

@ -1,8 +1,5 @@
#version 450
#extension GL_ARB_separate_shader_objects : enable
#extension GL_ARB_shading_language_420pack : enable
layout (location = 0) in vec3 inPos;
layout (location = 1) in vec2 inUV;
layout (location = 2) in vec3 inNormal;
@ -12,54 +9,29 @@ layout (location = 4) in vec3 inBiTangent;
layout (binding = 0) uniform UBO
{
mat4 projection;
mat4 view;
mat4 model;
mat4 normal;
vec4 lightPos;
vec4 cameraPos;
} ubo;
layout (location = 0) out vec2 outUV;
layout (location = 1) out vec3 outLightVec;
layout (location = 2) out vec3 outLightVecB;
layout (location = 3) out vec3 outSpecular;
layout (location = 4) out vec3 outEyeVec;
layout (location = 5) out vec3 outLightDir;
layout (location = 6) out vec3 outViewVec;
layout (location = 1) out vec3 outTangentLightPos;
layout (location = 2) out vec3 outTangentViewPos;
layout (location = 3) out vec3 outTangentFragPos;
void main(void)
{
vec3 vertexPosition = vec3(ubo.model * vec4(inPos, 1.0));
outLightDir = normalize(ubo.lightPos.xyz - vertexPosition);
// Set up the (t)angent-(b)inormal-(n)ormal matrix for converting
// object coordinates into tangent space
mat3 tbnMatrix;
tbnMatrix[0] = mat3(ubo.normal) * inTangent;
tbnMatrix[1] = mat3(ubo.normal) * inBiTangent;
tbnMatrix[2] = mat3(ubo.normal) * inNormal;
outEyeVec = vec3(-vertexPosition) * tbnMatrix;
outLightVec.xyz = vec3(ubo.lightPos.xyz - vertexPosition.xyz) * tbnMatrix;
vec3 lightDist = ubo.lightPos.xyz - inPos.xyz;
outLightVecB.x = dot(inTangent.xyz, lightDist);
outLightVecB.y = dot(inBiTangent.xyz, lightDist);
outLightVecB.z = dot(inNormal, lightDist);
vec3 camPos = vec3(ubo.normal * ubo.cameraPos);
vec3 camVec = camPos - inPos.xyz;
outViewVec.x = dot(inTangent, camVec);
outViewVec.y = dot(inBiTangent, camVec);
outViewVec.z = dot(inNormal, camVec);
vec3 reflectVec = reflect(-camVec, inNormal);
vec3 outViewVec = outLightDir;
float specIntensity = pow(max(dot(reflectVec, outViewVec), 0.0), 8.0);
outSpecular = vec3(specIntensity * 0.3);
gl_Position = ubo.projection * ubo.view * ubo.model * vec4(inPos, 1.0f);
outTangentFragPos = vec3(ubo.model * vec4(inPos, 1.0));
outUV = inUV;
gl_Position = ubo.projection * ubo.model * vec4(inPos, 1.0);
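// Build the TBN basis from the world-space tangent, bitangent and normal; its transpose
// takes light, camera and fragment positions into tangent space (assuming the basis stays orthonormal)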
vec3 T = normalize(mat3(ubo.model) * inTangent);
vec3 B = normalize(mat3(ubo.model) * inBiTangent);
vec3 N = normalize(mat3(ubo.model) * inNormal);
mat3 TBN = transpose(mat3(T, B, N));
outTangentLightPos = TBN * ubo.lightPos.xyz;
outTangentViewPos = TBN * ubo.cameraPos.xyz;
outTangentFragPos = TBN * outTangentFragPos;
}

View file

@ -30,20 +30,12 @@
class VulkanExample : public VulkanExampleBase
{
public:
bool splitScreen = false;
struct {
vks::Texture2D colorMap;
// Normals and height are combined in one texture (height = alpha channel)
// Normals and height are combined into one texture (height = alpha channel)
vks::Texture2D normalHeightMap;
} textures;
struct {
VkPipelineVertexInputStateCreateInfo inputState;
std::vector<VkVertexInputBindingDescription> bindingDescriptions;
std::vector<VkVertexInputAttributeDescription> attributeDescriptions;
} vertices;
// Vertex layout for the models
vks::VertexLayout vertexLayout = vks::VertexLayout({
vks::VERTEX_COMPONENT_POSITION,
@ -66,51 +58,43 @@ public:
struct {
glm::mat4 projection;
glm::mat4 view;
glm::mat4 model;
glm::mat4 normal;
glm::vec4 lightPos = glm::vec4(0.0f);
glm::vec4 lightPos = glm::vec4(0.0f, -2.0f, 0.0f, 1.0f);
glm::vec4 cameraPos;
} vertexShader;
struct {
// Scale and bias control the parallax offset effect
// They need to be tweaked for each material
// Getting them wrong destroys the depth effect
float scale = 0.06f;
float bias = -0.04f;
float lightRadius = 1.0f;
int32_t usePom = 1;
int32_t displayNormalMap = 0;
float heightScale = 0.1f;
// Basic parallax mapping needs a bias to look any good (and is hard to tweak)
float parallaxBias = -0.02f;
// Number of layers for steep parallax and parallax occlusion mapping (more layers = better quality at the cost of performance)
float numLayers = 48.0f;
// (Parallax) mapping mode to use
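// 0 = color only, 1 = normal mapping, 2 = parallax mapping,
// 3 = steep parallax mapping, 4 = parallax occlusion mapping (see the switch in parallax.frag)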
int32_t mappingMode = 4;
} fragmentShader;
} ubos;
struct {
VkPipeline parallaxMapping;
VkPipeline normalMapping;
} pipelines;
VkPipelineLayout pipelineLayout;
VkDescriptorSet descriptorSet;
VkPipeline pipeline;
VkDescriptorSetLayout descriptorSetLayout;
VkDescriptorSet descriptorSet;
VulkanExample() : VulkanExampleBase(ENABLE_VALIDATION)
{
zoom = -2.7f;
rotation = glm::vec3(56.0f, 0.0f, 0.0f);
rotationSpeed = 0.25f;
enableTextOverlay = true;
timerSpeed *= 0.25f;
paused = true;
title = "Vulkan Example - Parallax Mapping";
enableTextOverlay = true;
timerSpeed *= 0.5f;
camera.type = Camera::CameraType::firstperson;
camera.setPosition(glm::vec3(0.0f, 1.25f, 1.5f));
camera.setRotation(glm::vec3(-45.0f, 180.0f, 0.0f));
camera.setPerspective(60.0f, (float)width / (float)height, 0.1f, 256.0f);
}
~VulkanExample()
{
// Clean up used Vulkan resources
// Note : Inherited destructor cleans up resources stored in base class
vkDestroyPipeline(device, pipelines.parallaxMapping, nullptr);
vkDestroyPipeline(device, pipelines.normalMapping, nullptr);
vkDestroyPipeline(device, pipeline, nullptr);
vkDestroyPipelineLayout(device, pipelineLayout, nullptr);
vkDestroyDescriptorSetLayout(device, descriptorSetLayout, nullptr);
@ -127,6 +111,7 @@ public:
void loadAssets()
{
models.quad.loadFromFile(getAssetPath() + "models/plane_z.obj", vertexLayout, 0.1f, vulkanDevice, queue);
// Textures
textures.normalHeightMap.loadFromFile(getAssetPath() + "textures/rocks_normal_height_rgba.dds", VK_FORMAT_R8G8B8A8_UNORM, vulkanDevice, queue);
if (vulkanDevice->features.textureCompressionBC) {
@ -141,17 +126,6 @@ public:
else {
vks::tools::exitFatal("Device does not support any compressed texture format!", "Error");
}
}
void reBuildCommandBuffers()
{
if (!checkCommandBuffers())
{
destroyCommandBuffers();
createCommandBuffers();
}
buildCommandBuffers();
}
void buildCommandBuffers()
@ -180,7 +154,7 @@ public:
vkCmdBeginRenderPass(drawCmdBuffers[i], &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
VkViewport viewport = vks::initializers::viewport((splitScreen) ? (float)width / 2.0f : (float)width, (float)height, 0.0f, 1.0f);
VkViewport viewport = vks::initializers::viewport((float)width, (float)height, 0.0f, 1.0f);
vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);
VkRect2D scissor = vks::initializers::rect2D(width, height, 0, 0);
@ -192,82 +166,16 @@ public:
vkCmdBindVertexBuffers(drawCmdBuffers[i], VERTEX_BUFFER_BIND_ID, 1, &models.quad.vertices.buffer, offsets);
vkCmdBindIndexBuffer(drawCmdBuffers[i], models.quad.indices.buffer, 0, VK_INDEX_TYPE_UINT32);
// Parallax enabled
vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);
vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.parallaxMapping);
vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
vkCmdDrawIndexed(drawCmdBuffers[i], models.quad.indexCount, 1, 0, 0, 1);
// Normal mapping
if (splitScreen)
{
viewport.x = (float)width / 2.0f;
vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);
vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelines.normalMapping);
vkCmdDrawIndexed(drawCmdBuffers[i], models.quad.indexCount, 1, 0, 0, 1);
}
vkCmdEndRenderPass(drawCmdBuffers[i]);
VK_CHECK_RESULT(vkEndCommandBuffer(drawCmdBuffers[i]));
}
}
void setupVertexDescriptions()
{
// Binding description
vertices.bindingDescriptions.resize(1);
vertices.bindingDescriptions[0] =
vks::initializers::vertexInputBindingDescription(
VERTEX_BUFFER_BIND_ID,
vertexLayout.stride(),
VK_VERTEX_INPUT_RATE_VERTEX);
// Attribute descriptions
// Describes memory layout and shader positions
vertices.attributeDescriptions.resize(5);
// Location 0 : Position
vertices.attributeDescriptions[0] =
vks::initializers::vertexInputAttributeDescription(
VERTEX_BUFFER_BIND_ID,
0,
VK_FORMAT_R32G32B32_SFLOAT,
0);
// Location 1 : Texture coordinates
vertices.attributeDescriptions[1] =
vks::initializers::vertexInputAttributeDescription(
VERTEX_BUFFER_BIND_ID,
1,
VK_FORMAT_R32G32_SFLOAT,
sizeof(float) * 3);
// Location 2 : Normal
vertices.attributeDescriptions[2] =
vks::initializers::vertexInputAttributeDescription(
VERTEX_BUFFER_BIND_ID,
2,
VK_FORMAT_R32G32B32_SFLOAT,
sizeof(float) * 5);
// Location 3 : Tangent
vertices.attributeDescriptions[3] =
vks::initializers::vertexInputAttributeDescription(
VERTEX_BUFFER_BIND_ID,
3,
VK_FORMAT_R32G32B32_SFLOAT,
sizeof(float) * 8);
// Location 4 : Bitangent
vertices.attributeDescriptions[4] =
vks::initializers::vertexInputAttributeDescription(
VERTEX_BUFFER_BIND_ID,
4,
VK_FORMAT_R32G32B32_SFLOAT,
sizeof(float) * 11);
vertices.inputState = vks::initializers::pipelineVertexInputStateCreateInfo();
vertices.inputState.vertexBindingDescriptionCount = vertices.bindingDescriptions.size();
vertices.inputState.pVertexBindingDescriptions = vertices.bindingDescriptions.data();
vertices.inputState.vertexAttributeDescriptionCount = vertices.attributeDescriptions.size();
vertices.inputState.pVertexAttributeDescriptions = vertices.attributeDescriptions.data();
}
void setupDescriptorPool()
{
// Example uses two UBOs and two image samplers
@ -278,44 +186,22 @@ public:
};
VkDescriptorPoolCreateInfo descriptorPoolInfo =
vks::initializers::descriptorPoolCreateInfo(
poolSizes.size(),
poolSizes.data(),
4);
vks::initializers::descriptorPoolCreateInfo(poolSizes, 2);
VK_CHECK_RESULT(vkCreateDescriptorPool(device, &descriptorPoolInfo, nullptr, &descriptorPool));
}
void setupDescriptorSetLayout()
{
std::vector<VkDescriptorSetLayoutBinding> setLayoutBindings =
{
// Binding 0 : Vertex shader uniform buffer
vks::initializers::descriptorSetLayoutBinding(
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
VK_SHADER_STAGE_VERTEX_BIT,
0),
// Binding 1 : Fragment shader color map image sampler
vks::initializers::descriptorSetLayoutBinding(
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
VK_SHADER_STAGE_FRAGMENT_BIT,
1),
// Binding 2 : Fragment combined normal and heightmap
vks::initializers::descriptorSetLayoutBinding(
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
VK_SHADER_STAGE_FRAGMENT_BIT,
2),
// Binding 3 : Fragment shader uniform buffer
vks::initializers::descriptorSetLayoutBinding(
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
VK_SHADER_STAGE_FRAGMENT_BIT,
3)
std::vector<VkDescriptorSetLayoutBinding> setLayoutBindings = {
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_VERTEX_BIT, 0), // Binding 0: Vertex shader uniform buffer
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT, 1), // Binding 1: Fragment shader color map image sampler
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT, 2), // Binding 2: Fragment combined normal and heightmap
vks::initializers::descriptorSetLayoutBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_FRAGMENT_BIT, 3), // Binding 3: Fragment shader uniform buffer
};
VkDescriptorSetLayoutCreateInfo descriptorLayout =
vks::initializers::descriptorSetLayoutCreateInfo(
setLayoutBindings.data(),
setLayoutBindings.size());
vks::initializers::descriptorSetLayoutCreateInfo(setLayoutBindings);
VK_CHECK_RESULT(vkCreateDescriptorSetLayout(device, &descriptorLayout, nullptr, &descriptorSetLayout));
@ -337,99 +223,52 @@ public:
VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSet));
std::vector<VkWriteDescriptorSet> writeDescriptorSets =
{
// Binding 0 : Vertex shader uniform buffer
vks::initializers::writeDescriptorSet(
descriptorSet,
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
0,
&uniformBuffers.vertexShader.descriptor),
// Binding 1 : Fragment shader image sampler
vks::initializers::writeDescriptorSet(
descriptorSet,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
1,
&textures.colorMap.descriptor),
// Binding 2 : Combined normal and heightmap
vks::initializers::writeDescriptorSet(
descriptorSet,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
2,
&textures.normalHeightMap.descriptor),
// Binding 3 : Fragment shader uniform buffer
vks::initializers::writeDescriptorSet(
descriptorSet,
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
3,
&uniformBuffers.fragmentShader.descriptor)
std::vector<VkWriteDescriptorSet> writeDescriptorSets = {
vks::initializers::writeDescriptorSet(descriptorSet, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformBuffers.vertexShader.descriptor), // Binding 0: Vertex shader uniform buffer
vks::initializers::writeDescriptorSet(descriptorSet, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &textures.colorMap.descriptor), // Binding 1: Fragment shader image sampler
vks::initializers::writeDescriptorSet(descriptorSet, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2, &textures.normalHeightMap.descriptor), // Binding 2: Combined normal and heightmap
vks::initializers::writeDescriptorSet(descriptorSet, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3, &uniformBuffers.fragmentShader.descriptor), // Binding 3: Fragment shader uniform buffer
};
vkUpdateDescriptorSets(device, writeDescriptorSets.size(), writeDescriptorSets.data(), 0, NULL);
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, NULL);
}
void preparePipelines()
{
VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
vks::initializers::pipelineInputAssemblyStateCreateInfo(
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
0,
VK_FALSE);
vks::initializers::pipelineInputAssemblyStateCreateInfo(VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 0, VK_FALSE);
VkPipelineRasterizationStateCreateInfo rasterizationState =
vks::initializers::pipelineRasterizationStateCreateInfo(
VK_POLYGON_MODE_FILL,
VK_CULL_MODE_NONE,
VK_FRONT_FACE_COUNTER_CLOCKWISE,
0);
vks::initializers::pipelineRasterizationStateCreateInfo(VK_POLYGON_MODE_FILL, VK_CULL_MODE_NONE, VK_FRONT_FACE_COUNTER_CLOCKWISE);
VkPipelineColorBlendAttachmentState blendAttachmentState =
vks::initializers::pipelineColorBlendAttachmentState(
0xf,
VK_FALSE);
vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE);
VkPipelineColorBlendStateCreateInfo colorBlendState =
vks::initializers::pipelineColorBlendStateCreateInfo(
1,
&blendAttachmentState);
vks::initializers::pipelineColorBlendStateCreateInfo(1, &blendAttachmentState);
VkPipelineDepthStencilStateCreateInfo depthStencilState =
vks::initializers::pipelineDepthStencilStateCreateInfo(
VK_TRUE,
VK_TRUE,
VK_COMPARE_OP_LESS_OR_EQUAL);
vks::initializers::pipelineDepthStencilStateCreateInfo(VK_TRUE, VK_TRUE, VK_COMPARE_OP_LESS_OR_EQUAL);
VkPipelineViewportStateCreateInfo viewportState =
vks::initializers::pipelineViewportStateCreateInfo(1, 1, 0);
VkPipelineMultisampleStateCreateInfo multisampleState =
vks::initializers::pipelineMultisampleStateCreateInfo(
VK_SAMPLE_COUNT_1_BIT,
0);
vks::initializers::pipelineMultisampleStateCreateInfo(VK_SAMPLE_COUNT_1_BIT);
std::vector<VkDynamicState> dynamicStateEnables = {
VK_DYNAMIC_STATE_VIEWPORT,
VK_DYNAMIC_STATE_SCISSOR
};
VkPipelineDynamicStateCreateInfo dynamicState =
vks::initializers::pipelineDynamicStateCreateInfo(
dynamicStateEnables.data(),
dynamicStateEnables.size(),
0);
vks::initializers::pipelineDynamicStateCreateInfo(dynamicStateEnables);
// Parallax mapping pipeline
// Load shaders
std::array<VkPipelineShaderStageCreateInfo, 2> shaderStages;
shaderStages[0] = loadShader(getAssetPath() + "shaders/parallax/parallax.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
shaderStages[1] = loadShader(getAssetPath() + "shaders/parallax/parallax.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
VkGraphicsPipelineCreateInfo pipelineCreateInfo =
vks::initializers::pipelineCreateInfo(
pipelineLayout,
renderPass,
0);
vks::initializers::pipelineCreateInfo(pipelineLayout, renderPass);
pipelineCreateInfo.pVertexInputState = &vertices.inputState;
pipelineCreateInfo.pInputAssemblyState = &inputAssemblyState;
pipelineCreateInfo.pRasterizationState = &rasterizationState;
pipelineCreateInfo.pColorBlendState = &colorBlendState;
@ -437,15 +276,32 @@ public:
pipelineCreateInfo.pViewportState = &viewportState;
pipelineCreateInfo.pDepthStencilState = &depthStencilState;
pipelineCreateInfo.pDynamicState = &dynamicState;
pipelineCreateInfo.stageCount = shaderStages.size();
pipelineCreateInfo.stageCount = static_cast<uint32_t>(shaderStages.size());
pipelineCreateInfo.pStages = shaderStages.data();
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.parallaxMapping));
// Vertex bindings and attributes
std::vector<VkVertexInputBindingDescription> vertexInputBindings = {
vks::initializers::vertexInputBindingDescription(0, vertexLayout.stride(), VK_VERTEX_INPUT_RATE_VERTEX),
};
std::vector<VkVertexInputAttributeDescription> vertexInputAttributes = {
vks::initializers::vertexInputAttributeDescription(VERTEX_BUFFER_BIND_ID, 0, VK_FORMAT_R32G32B32_SFLOAT, 0), // Location 0: Position
vks::initializers::vertexInputAttributeDescription(VERTEX_BUFFER_BIND_ID, 1, VK_FORMAT_R32G32_SFLOAT, sizeof(float) * 3), // Location 1: Texture coordinates
vks::initializers::vertexInputAttributeDescription(VERTEX_BUFFER_BIND_ID, 2, VK_FORMAT_R32G32B32_SFLOAT, sizeof(float) * 5), // Location 2: Normal
vks::initializers::vertexInputAttributeDescription(VERTEX_BUFFER_BIND_ID, 3, VK_FORMAT_R32G32B32_SFLOAT, sizeof(float) * 8), // Location 3: Tangent
vks::initializers::vertexInputAttributeDescription(VERTEX_BUFFER_BIND_ID, 4, VK_FORMAT_R32G32B32_SFLOAT, sizeof(float) * 11), // Location 4: Bitangent
};
VkPipelineVertexInputStateCreateInfo vertexInputState = vks::initializers::pipelineVertexInputStateCreateInfo();
vertexInputState.vertexBindingDescriptionCount = static_cast<uint32_t>(vertexInputBindings.size());
vertexInputState.pVertexBindingDescriptions = vertexInputBindings.data();
vertexInputState.vertexAttributeDescriptionCount = static_cast<uint32_t>(vertexInputAttributes.size());
vertexInputState.pVertexAttributeDescriptions = vertexInputAttributes.data();
// Normal mapping (no parallax effect)
shaderStages[0] = loadShader(getAssetPath() + "shaders/parallax/normalmap.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
shaderStages[1] = loadShader(getAssetPath() + "shaders/parallax/normalmap.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipelines.normalMapping));
pipelineCreateInfo.pVertexInputState = &vertexInputState;
// Parallax mapping modes pipeline
shaderStages[0] = loadShader(getAssetPath() + "shaders/parallax/parallax.vert.spv", VK_SHADER_STAGE_VERTEX_BIT);
shaderStages[1] = loadShader(getAssetPath() + "shaders/parallax/parallax.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT);
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCreateInfo, nullptr, &pipeline));
}
void prepareUniformBuffers()
@ -474,25 +330,17 @@ public:
void updateUniformBuffers()
{
// Vertex shader
glm::mat4 viewMatrix = glm::mat4();
ubos.vertexShader.projection = glm::perspective(glm::radians(45.0f), (float)(width* ((splitScreen) ? 0.5f : 1.0f)) / (float)height, 0.001f, 256.0f);
viewMatrix = glm::translate(viewMatrix, glm::vec3(0.0f, 0.0f, zoom));
ubos.vertexShader.projection = camera.matrices.perspective;
ubos.vertexShader.view = camera.matrices.view;
ubos.vertexShader.model = glm::rotate(glm::mat4(), glm::radians(90.0f), glm::vec3(1.0f, 0.0f, 0.0f));
ubos.vertexShader.model = glm::rotate(ubos.vertexShader.model, glm::radians(180.0f), glm::vec3(0.0f, 0.0f, 1.0f));
ubos.vertexShader.model = glm::mat4();
ubos.vertexShader.model = viewMatrix * glm::translate(ubos.vertexShader.model, cameraPos);
ubos.vertexShader.model = glm::rotate(ubos.vertexShader.model, glm::radians(rotation.x), glm::vec3(1.0f, 0.0f, 0.0f));
ubos.vertexShader.model = glm::rotate(ubos.vertexShader.model, glm::radians(rotation.y), glm::vec3(0.0f, 1.0f, 0.0f));
ubos.vertexShader.model = glm::rotate(ubos.vertexShader.model, glm::radians(rotation.z), glm::vec3(0.0f, 0.0f, 1.0f));
ubos.vertexShader.normal = glm::inverseTranspose(ubos.vertexShader.model);
if (!paused)
{
ubos.vertexShader.lightPos.x = sin(glm::radians(timer * 360.0f)) * 0.5f;
ubos.vertexShader.lightPos.y = cos(glm::radians(timer * 360.0f)) * 0.5f;
if (!paused) {
ubos.vertexShader.lightPos.x = sin(glm::radians(timer * 360.0f)) * 1.5f;
ubos.vertexShader.lightPos.z = cos(glm::radians(timer * 360.0f)) * 1.5f;
}
ubos.vertexShader.cameraPos = glm::vec4(0.0, 0.0, zoom, 0.0);
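// Note: glm::vec4(camera.position, -1.0f) * -1.0f below is simply glm::vec4(-camera.position, 1.0f)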
ubos.vertexShader.cameraPos = glm::vec4(camera.position, -1.0f) * -1.0f;
memcpy(uniformBuffers.vertexShader.mapped, &ubos.vertexShader, sizeof(ubos.vertexShader));
@ -503,14 +351,9 @@ public:
void draw()
{
VulkanExampleBase::prepareFrame();
// Command buffer to be submitted to the queue
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &drawCmdBuffers[currentBuffer];
// Submit to queue
VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
VulkanExampleBase::submitFrame();
}
@ -518,7 +361,6 @@ public:
{
VulkanExampleBase::prepare();
loadAssets();
setupVertexDescriptions();
prepareUniformBuffers();
setupDescriptorSetLayout();
preparePipelines();
@ -544,54 +386,36 @@ public:
updateUniformBuffers();
}
void toggleParallaxOffset()
void toggleMappingMode()
{
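// Cycle through color only, normal, parallax, steep parallax and parallax occlusion mapping, then wrap back to the first mode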
ubos.fragmentShader.usePom = !ubos.fragmentShader.usePom;
ubos.fragmentShader.mappingMode++;
if (ubos.fragmentShader.mappingMode > 4) {
ubos.fragmentShader.mappingMode = 0;
}
updateUniformBuffers();
}
void toggleNormalMapDisplay()
{
ubos.fragmentShader.displayNormalMap = !ubos.fragmentShader.displayNormalMap;
updateUniformBuffers();
}
void toggleSplitScreen()
{
splitScreen = !splitScreen;
updateUniformBuffers();
reBuildCommandBuffers();
updateTextOverlay();
}
virtual void keyPressed(uint32_t keyCode)
{
switch (keyCode)
{
case KEY_O:
case KEY_SPACE:
case GAMEPAD_BUTTON_A:
toggleParallaxOffset();
break;
case KEY_N:
case GAMEPAD_BUTTON_X:
toggleNormalMapDisplay();
break;
case KEY_S:
case GAMEPAD_BUTTON_Y:
toggleSplitScreen();
toggleMappingMode();
break;
}
}
virtual void getOverlayText(VulkanTextOverlay *textOverlay)
{
const std::vector<std::string> mappingModes = {
"Color only", "Normal mapping", "Parallax mapping", "Steep parallax mapping", "Parallax occlusion mapping",
};
#if defined(__ANDROID__)
textOverlay->addText("Press \"Button A\" to toggle parallax", 5.0f, 85.0f, VulkanTextOverlay::alignLeft);
textOverlay->addText("Press \"Button X\" to toggle normals", 5.0f, 100.0f, VulkanTextOverlay::alignLeft);
textOverlay->addText("Press \"Button Y\" to toggle splitscreen", 5.0f, 115.0f, VulkanTextOverlay::alignLeft);
textOverlay->addText("Mode: " + mappingModes[ubos.fragmentShader.mappingMode] + " (\"Button A\")", 5.0f, 85.0f, VulkanTextOverlay::alignLeft);
#else
textOverlay->addText("Press \"o\" to toggle parallax", 5.0f, 85.0f, VulkanTextOverlay::alignLeft);
textOverlay->addText("Press \"n\" to toggle normals", 5.0f, 100.0f, VulkanTextOverlay::alignLeft);
textOverlay->addText("Press \"s\" to toggle splitscreen", 5.0f, 115.0f, VulkanTextOverlay::alignLeft);
textOverlay->addText("Mode: " + mappingModes[ubos.fragmentShader.mappingMode] + " (\"Space\")", 5.0f, 85.0f, VulkanTextOverlay::alignLeft);
#endif
}
};

View file

@ -21,6 +21,10 @@
<ClInclude Include="..\base\vulkanexamplebase.h" />
<ClInclude Include="..\base\vulkantools.h" />
</ItemGroup>
<ItemGroup>
<None Include="..\data\shaders\parallax\parallax.frag" />
<None Include="..\data\shaders\parallax\parallax.vert" />
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{3DB08441-72C9-4172-8BA9-ECEE032387BA}</ProjectGuid>
<RootNamespace>parallaxmapping</RootNamespace>
@ -54,6 +58,7 @@
<Optimization>Disabled</Optimization>
<OpenMPSupport>true</OpenMPSupport>
<AdditionalOptions>/FS %(AdditionalOptions)</AdditionalOptions>
<WarningLevel>Level3</WarningLevel>
</ClCompile>
<Link>
<AdditionalDependencies>..\libs\vulkan\vulkan-1.lib;..\libs\assimp\assimp.lib;%(AdditionalDependencies)</AdditionalDependencies>

View file

@ -13,6 +13,9 @@
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
</Filter>
<Filter Include="Shaders">
<UniqueIdentifier>{d4e5cf47-9778-4a8e-8ef1-632080b554d6}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\base\vulkandebug.cpp">
@ -39,4 +42,12 @@
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<None Include="..\data\shaders\parallax\parallax.frag">
<Filter>Shaders</Filter>
</None>
<None Include="..\data\shaders\parallax\parallax.vert">
<Filter>Shaders</Filter>
</None>
</ItemGroup>
</Project>