procedural-3d-engine/examples/mesh/mesh.cpp

/*
* Vulkan Example - Model loading and rendering
*
* Copyright (C) 2016-2020 by Sascha Willems - www.saschawillems.de
*
* This code is licensed under the MIT license (MIT) (http://opensource.org/licenses/MIT)
*/
/*
* Shows how to load and display a simple mesh from a glTF file
* Note that this isn't a complete glTF loader and only basic functions are shown here
* This means only linear nodes (no parent<->child tree), no animations, no skins, etc.
* For details on how glTF 2.0 works, see the official spec at https://github.com/KhronosGroup/glTF/tree/master/specification/2.0
*
* If you are looking for a complete glTF implementation, check out https://github.com/SaschaWillems/Vulkan-glTF-PBR/
*/
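// Overview of the loading flow used below: loadglTF() parses the file with tiny glTF and then
// imports images (pixel data), textures (image indices), materials (color factor + texture index)
// and nodes (meshes whose primitives reference vertex/index data through accessors and buffer views)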
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <vector>
#define GLM_FORCE_RADIANS
#define GLM_FORCE_DEPTH_ZERO_TO_ONE
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#define TINYGLTF_IMPLEMENTATION
#define STB_IMAGE_IMPLEMENTATION
#define TINYGLTF_NO_STB_IMAGE_WRITE
#include "tiny_gltf.h"
#include <vulkan/vulkan.h>
#include "vulkanexamplebase.h"
#include "VulkanTexture.hpp"
#define ENABLE_VALIDATION false
class VulkanExample : public VulkanExampleBase
{
public:
bool wireframe = false;
struct {
vks::Texture2D colorMap;
} textures;
// Vertex layout used in this example
// This must fit input locations of the vertex shader used to render the model
struct Vertex {
glm::vec3 pos;
glm::vec3 normal;
glm::vec2 uv;
glm::vec3 color;
};
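// preparePipelines() describes this layout to Vulkan through offsetof()-based vertex input
// attributes, so the member order here, the attribute descriptions and the input locations
// of the vertex shader (mesh.vert) must stay in sync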
struct ModelNode;
// Represents a single mesh-based node in the glTF scene graph
// This is simplified as much as possible to make this sample easy to understand
struct ModelNode {
ModelNode* parent;
uint32_t firstIndex;
uint32_t indexCount;
glm::mat4 matrix;
std::vector<ModelNode> children;
};
// Represents a glTF material, used e.g. to look up the texture to apply to a mesh
// Only includes the most basic properties required for this sample
struct ModelMaterial {
glm::vec4 baseColorFactor = glm::vec4(1.0f);
uint32_t baseColorTextureIndex;
};
// Contains all Vulkan resources required to represent vertex and index buffers for a model
// This is for demonstration and learning purposes; the other examples use a model loader class for easy access
struct Model {
std::vector<vks::Texture2D> images;
// Textures in glTF are indices used by materials to select an image (and optionally a sampler)
std::vector<uint32_t> textures;
std::vector<ModelMaterial> materials;
std::vector<ModelNode> nodes;
struct {
VkBuffer buffer;
VkDeviceMemory memory;
} vertices;
struct {
int count;
VkBuffer buffer;
VkDeviceMemory memory;
} indices;
// Destroys all Vulkan resources created for this model
void destroy(VkDevice device)
{
vkDestroyBuffer(device, vertices.buffer, nullptr);
vkFreeMemory(device, vertices.memory, nullptr);
vkDestroyBuffer(device, indices.buffer, nullptr);
vkFreeMemory(device, indices.memory, nullptr);
}
} model;
struct {
vks::Buffer scene;
} uniformBuffers;
struct {
glm::mat4 projection;
glm::mat4 model;
glm::vec4 lightPos = glm::vec4(25.0f, 5.0f, 5.0f, 1.0f);
} uboVS;
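// This struct is copied verbatim into the mapped uniform buffer in updateUniformBuffers(),
// so its layout must match the uniform block at binding 0 of the vertex shader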
struct Pipelines {
VkPipeline solid;
VkPipeline wireframe = VK_NULL_HANDLE;
} pipelines;
VkPipelineLayout pipelineLayout;
VkDescriptorSet descriptorSet;
VkDescriptorSetLayout descriptorSetLayout;
VulkanExample() : VulkanExampleBase(ENABLE_VALIDATION)
{
zoom = -5.5f;
zoomSpeed = 2.5f;
rotationSpeed = 0.5f;
rotation = { -0.5f, -112.75f, 0.0f };
cameraPos = { 0.1f, 1.1f, 0.0f };
title = "Model rendering";
settings.overlay = true;
}
~VulkanExample()
{
// Clean up used Vulkan resources
// Note : Inherited destructor cleans up resources stored in base class
vkDestroyPipeline(device, pipelines.solid, nullptr);
if (pipelines.wireframe != VK_NULL_HANDLE) {
vkDestroyPipeline(device, pipelines.wireframe, nullptr);
}
vkDestroyPipelineLayout(device, pipelineLayout, nullptr);
vkDestroyDescriptorSetLayout(device, descriptorSetLayout, nullptr);
model.destroy(device);
textures.colorMap.destroy();
uniformBuffers.scene.destroy();
}
virtual void getEnabledFeatures()
{
// Fill mode non solid is required for wireframe display
if (deviceFeatures.fillModeNonSolid) {
enabledFeatures.fillModeNonSolid = VK_TRUE;
}
}
void buildCommandBuffers()
{
VkCommandBufferBeginInfo cmdBufInfo = vks::initializers::commandBufferBeginInfo();
VkClearValue clearValues[2];
clearValues[0].color = defaultClearColor;
clearValues[1].depthStencil = { 1.0f, 0 };
VkRenderPassBeginInfo renderPassBeginInfo = vks::initializers::renderPassBeginInfo();
renderPassBeginInfo.renderPass = renderPass;
renderPassBeginInfo.renderArea.offset.x = 0;
renderPassBeginInfo.renderArea.offset.y = 0;
renderPassBeginInfo.renderArea.extent.width = width;
renderPassBeginInfo.renderArea.extent.height = height;
renderPassBeginInfo.clearValueCount = 2;
renderPassBeginInfo.pClearValues = clearValues;
for (size_t i = 0; i < drawCmdBuffers.size(); ++i)
{
// Set target frame buffer
renderPassBeginInfo.framebuffer = frameBuffers[i];
VK_CHECK_RESULT(vkBeginCommandBuffer(drawCmdBuffers[i], &cmdBufInfo));
vkCmdBeginRenderPass(drawCmdBuffers[i], &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
VkViewport viewport = vks::initializers::viewport((float)width, (float)height, 0.0f, 1.0f);
vkCmdSetViewport(drawCmdBuffers[i], 0, 1, &viewport);
VkRect2D scissor = vks::initializers::rect2D(width, height, 0, 0);
vkCmdSetScissor(drawCmdBuffers[i], 0, 1, &scissor);
vkCmdBindDescriptorSets(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineLayout, 0, 1, &descriptorSet, 0, NULL);
vkCmdBindPipeline(drawCmdBuffers[i], VK_PIPELINE_BIND_POINT_GRAPHICS, wireframe ? pipelines.wireframe : pipelines.solid);
drawglTFModel(drawCmdBuffers[i]);
drawUI(drawCmdBuffers[i]);
vkCmdEndRenderPass(drawCmdBuffers[i]);
VK_CHECK_RESULT(vkEndCommandBuffer(drawCmdBuffers[i]));
}
}
void drawglTFNode(VkCommandBuffer commandBuffer, const ModelNode& node)
{
if (node.indexCount > 0) {
vkCmdDrawIndexed(commandBuffer, node.indexCount, 1, node.firstIndex, 0, 0);
}
for (auto& child : node.children) {
drawglTFNode(commandBuffer, child);
}
}
void drawglTFModel(VkCommandBuffer commandBuffer)
{
// All vertices and indices are stored in single buffers, so we only need to bind once
VkDeviceSize offsets[1] = { 0 };
vkCmdBindVertexBuffers(commandBuffer, 0, 1, &model.vertices.buffer, offsets);
vkCmdBindIndexBuffer(commandBuffer, model.indices.buffer, 0, VK_INDEX_TYPE_UINT32);
for (auto& node : model.nodes) {
drawglTFNode(commandBuffer, node);
}
}
/*
Load images from the glTF file
Textures can be stored inside the glTF (which is the case for the sample model), so instead of directly
loading them from disk, we fetch them from the glTF loader and upload the buffers
*/
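// tiny glTF uses stb_image (see the defines at the top of this file) to decode embedded as well as
// external images into glTFImage.image as raw pixel data, so no extra image decoding is required here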
void loadglTFImages(tinygltf::Model& glTFModel)
{
model.images.resize(glTFModel.images.size());
for (size_t i = 0; i < glTFModel.images.size(); i++) {
tinygltf::Image& glTFImage = glTFModel.images[i];
// Get the image data from the glTF loader
unsigned char* buffer = nullptr;
VkDeviceSize bufferSize = 0;
bool deleteBuffer = false;
// We convert RGB-only images to RGBA, as most devices don't support RGB-formats in Vulkan
if (glTFImage.component == 3) {
bufferSize = glTFImage.width * glTFImage.height * 4;
buffer = new unsigned char[bufferSize];
unsigned char* rgba = buffer;
unsigned char* rgb = &glTFImage.image[0];
for (size_t p = 0; p < static_cast<size_t>(glTFImage.width) * glTFImage.height; ++p) {
for (int32_t j = 0; j < 3; ++j) {
rgba[j] = rgb[j];
}
// The source image has no alpha channel, so set it to fully opaque
rgba[3] = 255;
rgba += 4;
rgb += 3;
}
deleteBuffer = true;
}
else {
buffer = &glTFImage.image[0];
bufferSize = glTFImage.image.size();
}
model.images[i].fromBuffer(buffer, bufferSize, VK_FORMAT_R8G8B8A8_UNORM, glTFImage.width, glTFImage.height, vulkanDevice, queue);
// Release the temporary RGBA buffer created for RGB-only images
if (deleteBuffer) {
delete[] buffer;
}
}
}
/*
Load texture information
Each glTF texture stores the index of the image it samples; materials reference textures by this index
*/
void loadglTFTextures(tinygltf::Model& glTFModel)
{
model.textures.resize(glTFModel.textures.size());
for (size_t i = 0; i < glTFModel.textures.size(); i++) {
model.textures[i] = glTFModel.textures[i].source;
}
}
/*
Load Materials from the glTF file
Materials contain basic properties like colors and references to the textures used by that material
We only read the most basic properties required for our sample
*/
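// In glTF 2.0 these values belong to the metallic-roughness material model (pbrMetallicRoughness);
// tiny glTF exposes them through the material's values map that is read below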
void loadglTFMaterials(const tinygltf::Model& glTFModel)
{
model.materials.resize(glTFModel.materials.size());
for (size_t i = 0; i < glTFModel.materials.size(); i++) {
tinygltf::Material glTFMaterial = glTFModel.materials[i];
// Get the base color factor
if (glTFMaterial.values.find("baseColorFactor") != glTFMaterial.values.end()) {
model.materials[i].baseColorFactor = glm::make_vec4(glTFMaterial.values["baseColorFactor"].ColorFactor().data());
}
// Get base color texture index
if (glTFMaterial.values.find("baseColorTexture") != glTFMaterial.values.end()) {
model.materials[i].baseColorTextureIndex = glTFMaterial.values["baseColorTexture"].TextureIndex();
}
}
}
// Load a single glTF node
// glTF scenes are made up of nodes that contain mesh data
// This is the most basic way of loading a glTF node that ignores parent->child relations and nested matrices
void loadglTFNode(ModelNode* parent, const tinygltf::Node& glTFNode, const tinygltf::Model& glTFModel, std::vector<uint32_t>& indexBuffer, std::vector<Vertex>& vertexBuffer)
{
ModelNode node{};
node.matrix = glm::mat4(1.0f);
// Get the local node matrix
// It's either made up from translation, rotation, scale or a 4x4 matrix
if (glTFNode.translation.size() == 3) {
node.matrix = glm::translate(node.matrix, glm::vec3(glm::make_vec3(glTFNode.translation.data())));
}
if (glTFNode.rotation.size() == 4) {
glm::quat q = glm::make_quat(glTFNode.rotation.data());
node.matrix *= glm::mat4(q);
}
if (glTFNode.scale.size() == 3) {
node.matrix = glm::scale(node.matrix, glm::vec3(glm::make_vec3(glTFNode.scale.data())));
}
if (glTFNode.matrix.size() == 16) {
node.matrix = glm::make_mat4x4(glTFNode.matrix.data());
}
// Load node's children
if (glTFNode.children.size() > 0) {
for (size_t i = 0; i < glTFNode.children.size(); i++) {
loadglTFNode(&node, glTFModel.nodes[glTFNode.children[i]], glTFModel, indexBuffer, vertexBuffer);
}
}
// If the node contains mesh data, we load vertices and indices from the buffers
// In glTF this is done via accessors and buffer views
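// An accessor defines the type, count and byte offset of the data within a buffer view,
// and the buffer view in turn references a byte range inside one of the glTF file's binary buffers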
if (glTFNode.mesh > -1) {
const tinygltf::Mesh mesh = glTFModel.meshes[glTFNode.mesh];
uint32_t indexStart = static_cast<uint32_t>(indexBuffer.size());
uint32_t vertexStart = static_cast<uint32_t>(vertexBuffer.size());
uint32_t indexCount = 0;
// Iterate through all primitives of this node's mesh
for (size_t i = 0; i < mesh.primitives.size(); i++) {
const tinygltf::Primitive& primitive = mesh.primitives[i];
// Vertices
{
const float* positionBuffer = nullptr;
const float* normalsBuffer = nullptr;
const float* texCoordsBuffer = nullptr;
size_t vertexCount = 0;
// Get buffer data for vertex positions
if (primitive.attributes.find("POSITION") != primitive.attributes.end()) {
const tinygltf::Accessor& accessor = glTFModel.accessors[primitive.attributes.find("POSITION")->second];
const tinygltf::BufferView& view = glTFModel.bufferViews[accessor.bufferView];
positionBuffer = reinterpret_cast<const float*>(&(glTFModel.buffers[view.buffer].data[accessor.byteOffset + view.byteOffset]));
vertexCount = accessor.count;
}
// Get buffer data for vertex normals
if (primitive.attributes.find("NORMAL") != primitive.attributes.end()) {
const tinygltf::Accessor& accessor = glTFModel.accessors[primitive.attributes.find("NORMAL")->second];
const tinygltf::BufferView& view = glTFModel.bufferViews[accessor.bufferView];
normalsBuffer = reinterpret_cast<const float*>(&(glTFModel.buffers[view.buffer].data[accessor.byteOffset + view.byteOffset]));
}
// Get buffer data for vertex texture coordinates
// glTF supports multiple sets, we only load the first one
if (primitive.attributes.find("TEXCOORD_0") != primitive.attributes.end()) {
const tinygltf::Accessor& accessor = glTFModel.accessors[primitive.attributes.find("TEXCOORD_0")->second];
const tinygltf::BufferView& view = glTFModel.bufferViews[accessor.bufferView];
texCoordsBuffer = reinterpret_cast<const float*>(&(glTFModel.buffers[view.buffer].data[accessor.byteOffset + view.byteOffset]));
}
// Append data to model's vertex buffer
for (size_t v = 0; v < vertexCount; v++) {
Vertex vert{};
vert.pos = glm::vec4(glm::make_vec3(&positionBuffer[v * 3]), 1.0f);
vert.normal = glm::normalize(glm::vec3(normalsBuffer ? glm::make_vec3(&normalsBuffer[v * 3]) : glm::vec3(0.0f)));
vert.uv = texCoordsBuffer ? glm::make_vec2(&texCoordsBuffer[v * 2]) : glm::vec2(0.0f);
// This sample does not read the COLOR_0 attribute, so default the vertex color to white
vert.color = glm::vec3(1.0f);
vertexBuffer.push_back(vert);
}
}
// Indices
{
const tinygltf::Accessor& accessor = glTFModel.accessors[primitive.indices];
const tinygltf::BufferView& bufferView = glTFModel.bufferViews[accessor.bufferView];
const tinygltf::Buffer& buffer = glTFModel.buffers[bufferView.buffer];
indexCount += static_cast<uint32_t>(accessor.count);
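// glTF allows 32-, 16- and 8-bit index component types; they are all widened to uint32_t here and
// offset by vertexStart so that all primitives can index into the single shared vertex buffer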
switch (accessor.componentType) {
case TINYGLTF_PARAMETER_TYPE_UNSIGNED_INT: {
uint32_t* buf = new uint32_t[accessor.count];
memcpy(buf, &buffer.data[accessor.byteOffset + bufferView.byteOffset], accessor.count * sizeof(uint32_t));
for (size_t index = 0; index < accessor.count; index++) {
indexBuffer.push_back(buf[index] + vertexStart);
}
delete[] buf;
break;
}
case TINYGLTF_PARAMETER_TYPE_UNSIGNED_SHORT: {
uint16_t* buf = new uint16_t[accessor.count];
memcpy(buf, &buffer.data[accessor.byteOffset + bufferView.byteOffset], accessor.count * sizeof(uint16_t));
for (size_t index = 0; index < accessor.count; index++) {
indexBuffer.push_back(buf[index] + vertexStart);
}
delete[] buf;
break;
}
case TINYGLTF_PARAMETER_TYPE_UNSIGNED_BYTE: {
uint8_t* buf = new uint8_t[accessor.count];
memcpy(buf, &buffer.data[accessor.byteOffset + bufferView.byteOffset], accessor.count * sizeof(uint8_t));
for (size_t index = 0; index < accessor.count; index++) {
indexBuffer.push_back(buf[index] + vertexStart);
}
delete[] buf;
break;
}
default:
std::cerr << "Index component type " << accessor.componentType << " not supported!" << std::endl;
return;
}
}
}
node.firstIndex = indexStart;
node.indexCount = indexCount;
}
if (parent) {
parent->children.push_back(node);
}
else {
model.nodes.push_back(node);
}
}
// Load the glTF file, parse its contents and build the Vulkan vertex and index buffers used for rendering
void loadglTF(std::string filename)
{
tinygltf::Model gltfModel;
tinygltf::TinyGLTF gltfContext;
std::string error, warning;
#if defined(__ANDROID__)
// On Android all assets are packed with the apk in a compressed form, so we need to open them using the asset manager
AAsset* asset = AAssetManager_open(androidApp->activity->assetManager, filename.c_str(), AASSET_MODE_STREAMING);
assert(asset);
size_t size = AAsset_getLength(asset);
assert(size > 0);
char* fileData = new char[size];
AAsset_read(asset, fileData, size);
AAsset_close(asset);
std::string baseDir;
bool fileLoaded = gltfContext.LoadASCIIFromString(&gltfModel, &error, &warning, fileData, size, baseDir);
delete[] fileData;
#else
bool fileLoaded = gltfContext.LoadASCIIFromFile(&gltfModel, &error, &warning, filename);
#endif
std::vector<uint32_t> indexBuffer;
std::vector<Vertex> vertexBuffer;
if (fileLoaded) {
loadglTFImages(gltfModel);
loadglTFMaterials(gltfModel);
loadglTFTextures(gltfModel);
const tinygltf::Scene& scene = gltfModel.scenes[0];
for (size_t i = 0; i < scene.nodes.size(); i++) {
const tinygltf::Node& node = gltfModel.nodes[scene.nodes[i]];
loadglTFNode(nullptr, node, gltfModel, indexBuffer, vertexBuffer);
}
}
else {
std::cerr << "Could not load glTF file: " << error << std::endl;
return;
}
size_t vertexBufferSize = vertexBuffer.size() * sizeof(Vertex);
size_t indexBufferSize = indexBuffer.size() * sizeof(uint32_t);
model.indices.count = static_cast<uint32_t>(indexBuffer.size());
//assert((vertexBufferSize > 0) && (indexBufferSize > 0));
struct StagingBuffer {
VkBuffer buffer;
VkDeviceMemory memory;
} vertexStaging, indexStaging;
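// The geometry is uploaded with the usual staging pattern: copy the data into host-visible staging
// buffers first, then transfer it via vkCmdCopyBuffer into device-local buffers, which offer the
// best access performance for the GPU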
// Create staging buffers
// Vertex data
VK_CHECK_RESULT(vulkanDevice->createBuffer(
VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
vertexBufferSize,
&vertexStaging.buffer,
&vertexStaging.memory,
vertexBuffer.data()));
// Index data
VK_CHECK_RESULT(vulkanDevice->createBuffer(
VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
indexBufferSize,
&indexStaging.buffer,
&indexStaging.memory,
indexBuffer.data()));
// Create device local buffers
// Vertex buffer
VK_CHECK_RESULT(vulkanDevice->createBuffer(
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
vertexBufferSize,
&model.vertices.buffer,
&model.vertices.memory));
// Index buffer
VK_CHECK_RESULT(vulkanDevice->createBuffer(
VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
indexBufferSize,
&model.indices.buffer,
&model.indices.memory));
// Copy data from staging buffers (host) to the device local buffers (GPU)
VkCommandBuffer copyCmd = VulkanExampleBase::createCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY, true);
VkBufferCopy copyRegion = {};
copyRegion.size = vertexBufferSize;
vkCmdCopyBuffer(
copyCmd,
vertexStaging.buffer,
model.vertices.buffer,
1,
&copyRegion);
copyRegion.size = indexBufferSize;
vkCmdCopyBuffer(
copyCmd,
indexStaging.buffer,
model.indices.buffer,
1,
&copyRegion);
VulkanExampleBase::flushCommandBuffer(copyCmd, queue, true);
vkDestroyBuffer(device, vertexStaging.buffer, nullptr);
vkFreeMemory(device, vertexStaging.memory, nullptr);
vkDestroyBuffer(device, indexStaging.buffer, nullptr);
vkFreeMemory(device, indexStaging.memory, nullptr);
}
void loadAssets()
{
loadglTF(getAssetPath() + "models/voyager/voyager.gltf");
textures.colorMap.loadFromFile(getAssetPath() + "models/voyager/voyager_rgba_unorm.ktx", VK_FORMAT_R8G8B8A8_UNORM, vulkanDevice, queue);
}
void setupDescriptorPool()
{
// Example uses one ubo and one combined image sampler
std::vector<VkDescriptorPoolSize> poolSizes =
{
vks::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1),
vks::initializers::descriptorPoolSize(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1),
};
VkDescriptorPoolCreateInfo descriptorPoolInfo =
vks::initializers::descriptorPoolCreateInfo(
static_cast<uint32_t>(poolSizes.size()),
poolSizes.data(),
1);
VK_CHECK_RESULT(vkCreateDescriptorPool(device, &descriptorPoolInfo, nullptr, &descriptorPool));
}
void setupDescriptorSetLayout()
{
std::vector<VkDescriptorSetLayoutBinding> setLayoutBindings =
{
// Binding 0 : Vertex shader uniform buffer
vks::initializers::descriptorSetLayoutBinding(
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
VK_SHADER_STAGE_VERTEX_BIT,
0),
// Binding 1 : Fragment shader combined sampler
vks::initializers::descriptorSetLayoutBinding(
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
VK_SHADER_STAGE_FRAGMENT_BIT,
1),
};
VkDescriptorSetLayoutCreateInfo descriptorLayout =
vks::initializers::descriptorSetLayoutCreateInfo(
setLayoutBindings.data(),
static_cast<uint32_t>(setLayoutBindings.size()));
VK_CHECK_RESULT(vkCreateDescriptorSetLayout(device, &descriptorLayout, nullptr, &descriptorSetLayout));
VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo =
vks::initializers::pipelineLayoutCreateInfo(
&descriptorSetLayout,
1);
VK_CHECK_RESULT(vkCreatePipelineLayout(device, &pPipelineLayoutCreateInfo, nullptr, &pipelineLayout));
}
void setupDescriptorSet()
{
VkDescriptorSetAllocateInfo allocInfo = vks::initializers::descriptorSetAllocateInfo(descriptorPool, &descriptorSetLayout, 1);
VK_CHECK_RESULT(vkAllocateDescriptorSets(device, &allocInfo, &descriptorSet));
std::vector<VkWriteDescriptorSet> writeDescriptorSets = {
// Binding 0 : Vertex shader uniform buffer
vks::initializers::writeDescriptorSet(descriptorSet, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0, &uniformBuffers.scene.descriptor),
// Binding 1 : Color map
vks::initializers::writeDescriptorSet(descriptorSet, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1, &textures.colorMap.descriptor)
};
vkUpdateDescriptorSets(device, static_cast<uint32_t>(writeDescriptorSets.size()), writeDescriptorSets.data(), 0, NULL);
}
void preparePipelines()
{
VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCI = vks::initializers::pipelineInputAssemblyStateCreateInfo(VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 0, VK_FALSE);
VkPipelineRasterizationStateCreateInfo rasterizationStateCI = vks::initializers::pipelineRasterizationStateCreateInfo(VK_POLYGON_MODE_FILL, VK_CULL_MODE_BACK_BIT, VK_FRONT_FACE_CLOCKWISE, 0);
VkPipelineColorBlendAttachmentState blendAttachmentStateCI = vks::initializers::pipelineColorBlendAttachmentState(0xf, VK_FALSE);
VkPipelineColorBlendStateCreateInfo colorBlendStateCI = vks::initializers::pipelineColorBlendStateCreateInfo(1, &blendAttachmentStateCI);
VkPipelineDepthStencilStateCreateInfo depthStencilStateCI = vks::initializers::pipelineDepthStencilStateCreateInfo(VK_TRUE, VK_TRUE, VK_COMPARE_OP_LESS_OR_EQUAL);
VkPipelineViewportStateCreateInfo viewportStateCI = vks::initializers::pipelineViewportStateCreateInfo(1, 1, 0);
VkPipelineMultisampleStateCreateInfo multisampleStateCI = vks::initializers::pipelineMultisampleStateCreateInfo(VK_SAMPLE_COUNT_1_BIT, 0);
const std::vector<VkDynamicState> dynamicStateEnables = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR };
VkPipelineDynamicStateCreateInfo dynamicStateCI = vks::initializers::pipelineDynamicStateCreateInfo(dynamicStateEnables.data(), static_cast<uint32_t>(dynamicStateEnables.size()), 0);
// Vertex input bindings and attributes
const std::vector<VkVertexInputBindingDescription> vertexInputBindings = {
vks::initializers::vertexInputBindingDescription(0, sizeof(Vertex), VK_VERTEX_INPUT_RATE_VERTEX),
};
const std::vector<VkVertexInputAttributeDescription> vertexInputAttributes = {
vks::initializers::vertexInputAttributeDescription(0, 0, VK_FORMAT_R32G32B32_SFLOAT, offsetof(Vertex, pos)), // Location 0: Position
vks::initializers::vertexInputAttributeDescription(0, 1, VK_FORMAT_R32G32B32_SFLOAT, offsetof(Vertex, normal)), // Location 1: Normal
vks::initializers::vertexInputAttributeDescription(0, 2, VK_FORMAT_R32G32_SFLOAT, offsetof(Vertex, uv)), // Location 2: Texture coordinates (vec2)
vks::initializers::vertexInputAttributeDescription(0, 3, VK_FORMAT_R32G32B32_SFLOAT, offsetof(Vertex, color)), // Location 3: Color
};
VkPipelineVertexInputStateCreateInfo vertexInputStateCI = vks::initializers::pipelineVertexInputStateCreateInfo();
vertexInputStateCI.vertexBindingDescriptionCount = static_cast<uint32_t>(vertexInputBindings.size());
vertexInputStateCI.pVertexBindingDescriptions = vertexInputBindings.data();
vertexInputStateCI.vertexAttributeDescriptionCount = static_cast<uint32_t>(vertexInputAttributes.size());
vertexInputStateCI.pVertexAttributeDescriptions = vertexInputAttributes.data();
const std::array<VkPipelineShaderStageCreateInfo, 2> shaderStages = {
loadShader(getAssetPath() + "shaders/mesh/mesh.vert.spv", VK_SHADER_STAGE_VERTEX_BIT),
loadShader(getAssetPath() + "shaders/mesh/mesh.frag.spv", VK_SHADER_STAGE_FRAGMENT_BIT)
};
VkGraphicsPipelineCreateInfo pipelineCI = vks::initializers::pipelineCreateInfo(pipelineLayout, renderPass, 0);
pipelineCI.pVertexInputState = &vertexInputStateCI;
pipelineCI.pInputAssemblyState = &inputAssemblyStateCI;
pipelineCI.pRasterizationState = &rasterizationStateCI;
pipelineCI.pColorBlendState = &colorBlendStateCI;
pipelineCI.pMultisampleState = &multisampleStateCI;
pipelineCI.pViewportState = &viewportStateCI;
pipelineCI.pDepthStencilState = &depthStencilStateCI;
pipelineCI.pDynamicState = &dynamicStateCI;
pipelineCI.stageCount = static_cast<uint32_t>(shaderStages.size());
pipelineCI.pStages = shaderStages.data();
// Solid rendering pipeline
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCI, nullptr, &pipelines.solid));
// Wire frame rendering pipeline
if (deviceFeatures.fillModeNonSolid) {
rasterizationStateCI.polygonMode = VK_POLYGON_MODE_LINE;
rasterizationStateCI.lineWidth = 1.0f;
VK_CHECK_RESULT(vkCreateGraphicsPipelines(device, pipelineCache, 1, &pipelineCI, nullptr, &pipelines.wireframe));
}
}
// Prepare and initialize uniform buffer containing shader uniforms
void prepareUniformBuffers()
{
// Vertex shader uniform buffer block
VK_CHECK_RESULT(vulkanDevice->createBuffer(
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
&uniformBuffers.scene,
sizeof(uboVS)));
// Map persistent
VK_CHECK_RESULT(uniformBuffers.scene.map());
updateUniformBuffers();
}
void updateUniformBuffers()
{
uboVS.projection = glm::perspective(glm::radians(60.0f), (float)width / (float)height, 0.1f, 256.0f);
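// The view transform is a translation along the z-axis by the current zoom level; the model matrix
// then applies the camera offset followed by the rotations around the x, y and z axes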
glm::mat4 viewMatrix = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.0f, zoom));
uboVS.model = viewMatrix * glm::translate(glm::mat4(1.0f), cameraPos);
uboVS.model = glm::rotate(uboVS.model, glm::radians(rotation.x), glm::vec3(1.0f, 0.0f, 0.0f));
uboVS.model = glm::rotate(uboVS.model, glm::radians(rotation.y), glm::vec3(0.0f, 1.0f, 0.0f));
uboVS.model = glm::rotate(uboVS.model, glm::radians(rotation.z), glm::vec3(0.0f, 0.0f, 1.0f));
memcpy(uniformBuffers.scene.mapped, &uboVS, sizeof(uboVS));
}
void draw()
{
VulkanExampleBase::prepareFrame();
// Command buffer to be submitted to the queue
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &drawCmdBuffers[currentBuffer];
// Submit to queue
VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, VK_NULL_HANDLE));
VulkanExampleBase::submitFrame();
}
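// Note on ordering in prepare(): the descriptor set layout must exist before the pipeline layout and
// the pipelines, the descriptor pool before a set can be allocated from it, and the command buffers
// are built last because they reference the pipelines and the descriptor set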
void prepare()
{
VulkanExampleBase::prepare();
loadAssets();
prepareUniformBuffers();
setupDescriptorSetLayout();
preparePipelines();
setupDescriptorPool();
setupDescriptorSet();
buildCommandBuffers();
prepared = true;
}
virtual void render()
{
if (!prepared)
return;
draw();
if (camera.updated)
updateUniformBuffers();
}
virtual void OnUpdateUIOverlay(vks::UIOverlay *overlay)
{
if (overlay->header("Settings")) {
if (overlay->checkBox("Wireframe", &wireframe)) {
buildCommandBuffers();
}
}
}
};
VULKAN_EXAMPLE_MAIN()