Added slang shaders for hdr and graphics pipeline library samples

Sascha Willems 2025-05-18 11:21:50 +02:00
parent 381c6eff03
commit b81e9d9654
5 changed files with 347 additions and 1 deletion


@@ -17,6 +17,10 @@ def checkRenameFiles(samplename):
                 "normaldebug.vert.spv": "base.vert.spv",
                 "normaldebug.frag.spv": "base.frag.spv",
             }
+        case "graphicspipelinelibrary":
+            mappings = {
+                "uber.vert.spv": "shared.vert.spv",
+            }
         case "raytracingbasic":
             mappings = {
                 "raytracingbasic.rchit.spv": "closesthit.rchit.spv",


@@ -0,0 +1,99 @@
/* Copyright (c) 2025, Sascha Willems
*
* SPDX-License-Identifier: MIT
*
*/
struct VSInput
{
float3 Pos;
float3 Normal;
float3 Color;
};
struct VSOutput
{
float4 Pos : SV_POSITION;
float3 Normal;
float3 Color;
float3 ViewVec;
float3 LightVec;
};
struct UBO
{
float4x4 projection;
float4x4 model;
float4 lightPos;
};
ConstantBuffer<UBO> ubo;
// We use this constant to control the flow of the shader depending on the
// lighting model selected at pipeline creation time
[[SpecializationConstant]] const int LIGHTING_MODEL = 0;
[shader("vertex")]
VSOutput vertexMain(VSInput input)
{
VSOutput output;
output.Color = input.Color;
output.Pos = mul(ubo.projection, mul(ubo.model, float4(input.Pos.xyz, 1.0)));
float4 pos = mul(ubo.model, float4(input.Pos, 1.0));
output.Normal = mul((float3x3)ubo.model, input.Normal);
float3 lPos = mul((float3x3)ubo.model, ubo.lightPos.xyz);
output.LightVec = lPos - pos.xyz;
output.ViewVec = -pos.xyz;
return output;
}
[shader("fragment")]
float4 fragmentMain(VSOutput input)
{
float3 outColor = float3(0.0);
switch (LIGHTING_MODEL) {
case 0: // Phong
{
float3 ambient = input.Color * float3(0.25, 0.25, 0.25);
float3 N = normalize(input.Normal);
float3 L = normalize(input.LightVec);
float3 V = normalize(input.ViewVec);
float3 R = reflect(-L, N);
float3 diffuse = max(dot(N, L), 0.0) * input.Color;
float3 specular = pow(max(dot(R, V), 0.0), 32.0) * float3(0.75);
outColor = ambient + diffuse * 1.75 + specular;
break;
}
case 1: // Toon
{
float3 N = normalize(input.Normal);
float3 L = normalize(input.LightVec);
float intensity = dot(N, L);
float3 color;
if (intensity > 0.98)
color = input.Color * 1.5;
else if (intensity > 0.9)
color = input.Color * 1.0;
else if (intensity > 0.5)
color = input.Color * 0.6;
else if (intensity > 0.25)
color = input.Color * 0.4;
else
color = input.Color * 0.2;
outColor = color;
break;
}
case 2: // No shading
{
outColor = input.Color;
break;
}
case 3: // Greyscale
outColor = dot(input.Color, float3(0.299, 0.587, 0.114));
break;
}
// The scene itself is a bit dark, so brighten it up a bit
return float4(outColor * 1.25, 1.0);
}
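
The uber shader above branches on LIGHTING_MODEL, so each pipeline (or fragment-shader pipeline-library part) bakes in exactly one lighting model when it is created. A minimal host-side sketch of how that constant could be supplied is below; it is illustrative only and not part of this commit, and it assumes the Slang compiler keeps the entry-point name "fragmentMain" and assigns constant_id 0 to the first specialization constant.

#include <vulkan/vulkan.h>
#include <cstdint>

// Hypothetical helper: builds the fragment stage of one pipeline variant with
// LIGHTING_MODEL specialized to `lightingModel`. The out-parameters keep the
// specialization data alive until vkCreateGraphicsPipelines has been called.
VkPipelineShaderStageCreateInfo makeUberFragmentStage(VkShaderModule fragModule,
                                                      int32_t lightingModel,
                                                      VkSpecializationMapEntry& mapEntry,
                                                      VkSpecializationInfo& specInfo,
                                                      int32_t& constantData)
{
    mapEntry.constantID = 0;              // assumed id of LIGHTING_MODEL
    mapEntry.offset = 0;
    mapEntry.size = sizeof(int32_t);

    constantData = lightingModel;         // 0 = Phong, 1 = Toon, 2 = No shading, 3 = Greyscale

    specInfo.mapEntryCount = 1;
    specInfo.pMapEntries = &mapEntry;
    specInfo.dataSize = sizeof(int32_t);
    specInfo.pData = &constantData;

    VkPipelineShaderStageCreateInfo stage{};
    stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stage.stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    stage.module = fragModule;
    stage.pName = "fragmentMain";         // assumed Slang entry-point name
    stage.pSpecializationInfo = &specInfo;
    return stage;
}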


@@ -0,0 +1,80 @@
/* Copyright (c) 2025, Sascha Willems
*
* SPDX-License-Identifier: MIT
*
*/
struct VSOutput
{
float4 Pos : SV_POSITION;
float2 UV;
};
Sampler2D samplerColor0;
Sampler2D samplerColor1;
[[SpecializationConstant]] const int dir = 0;
[shader("vertex")]
VSOutput vertexMain(uint VertexIndex: SV_VertexID)
{
VSOutput output;
output.UV = float2((VertexIndex << 1) & 2, VertexIndex & 2);
output.Pos = float4(output.UV * 2.0f - 1.0f, 0.0f, 1.0f);
return output;
}
[shader("fragment")]
float4 fragmentMain(VSOutput input)
{
// From the OpenGL SuperBible
const float weights[] = { 0.0024499299678342,
0.0043538453346397,
0.0073599963704157,
0.0118349786570722,
0.0181026699707781,
0.0263392293891488,
0.0364543006660986,
0.0479932050577658,
0.0601029809166942,
0.0715974486241365,
0.0811305381519717,
0.0874493212267511,
0.0896631113333857,
0.0874493212267511,
0.0811305381519717,
0.0715974486241365,
0.0601029809166942,
0.0479932050577658,
0.0364543006660986,
0.0263392293891488,
0.0181026699707781,
0.0118349786570722,
0.0073599963704157,
0.0043538453346397,
0.0024499299678342};
const float blurScale = 0.003;
const float blurStrength = 1.0;
float ar = 1.0;
// Aspect ratio for vertical blur pass
if (dir == 1)
{
float2 ts;
samplerColor1.GetDimensions(ts.x, ts.y);
ar = ts.y / ts.x;
}
float2 P = input.UV.yx - float2(0, (25 >> 1) * ar * blurScale);
float4 color = float4(0.0, 0.0, 0.0, 0.0);
for (int i = 0; i < 25; i++)
{
float2 dv = float2(0.0, i * blurScale) * ar;
color += samplerColor1.Sample(P + dv) * weights[i] * blurStrength;
}
return color;
}
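
The blur above is meant to run once per direction, with the dir specialization constant selecting the horizontal pass (dir == 0) or the aspect-ratio-corrected vertical pass (dir == 1) per pipeline. A rough sketch of building both pipelines from the same stages follows; it assumes a pre-filled VkGraphicsPipelineCreateInfo and uses placeholder names, not the sample's actual C++ code.

#include <vulkan/vulkan.h>
#include <cstdint>

struct BlurPipelines {
    VkPipeline horizontal = VK_NULL_HANDLE; // dir == 0
    VkPipeline vertical   = VK_NULL_HANDLE; // dir == 1
};

// Hypothetical helper: stages[0] is the shared vertex stage, stages[1] the blur
// fragment stage; pipelineCI is assumed to be filled out except for the stages.
BlurPipelines createBlurPipelines(VkDevice device,
                                  VkGraphicsPipelineCreateInfo pipelineCI,
                                  VkPipelineShaderStageCreateInfo stages[2])
{
    BlurPipelines result{};

    VkSpecializationMapEntry entry{};
    entry.constantID = 0;                 // assumed id of dir
    entry.offset = 0;
    entry.size = sizeof(int32_t);

    for (int32_t dir = 0; dir <= 1; dir++) {
        VkSpecializationInfo specInfo{};
        specInfo.mapEntryCount = 1;
        specInfo.pMapEntries = &entry;
        specInfo.dataSize = sizeof(int32_t);
        specInfo.pData = &dir;            // read during pipeline creation below

        stages[1].pSpecializationInfo = &specInfo;
        pipelineCI.stageCount = 2;
        pipelineCI.pStages = stages;

        VkPipeline pipeline = VK_NULL_HANDLE;
        vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &pipelineCI, nullptr, &pipeline);
        (dir == 0 ? result.horizontal : result.vertical) = pipeline;
    }
    return result;
}

Because dir is a specialization constant rather than a uniform, the driver can resolve the branch at pipeline-compile time and strip the unused path from each variant.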


@@ -0,0 +1,28 @@
/* Copyright (c) 2025, Sascha Willems
*
* SPDX-License-Identifier: MIT
*
*/
struct VSOutput
{
float4 Pos : SV_POSITION;
float2 UV;
};
Sampler2D samplerColor;
[shader("vertex")]
VSOutput vertexMain(uint VertexIndex: SV_VertexID)
{
VSOutput output;
output.UV = float2((VertexIndex << 1) & 2, VertexIndex & 2);
output.Pos = float4(output.UV * 2.0f - 1.0f, 0.0f, 1.0f);
return output;
}
[shader("fragment")]
float4 fragmentMain(VSOutput input)
{
return samplerColor.Sample(input.UV);
}
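
The composition (and blur) vertex shader above derives a fullscreen triangle from SV_VertexID alone, so the pass binds no vertex or index buffer. A hedged sketch of the matching draw call; handles and names are placeholders, not the sample's code.

#include <vulkan/vulkan.h>

void drawFullscreenTriangle(VkCommandBuffer cmd, VkPipeline pipeline,
                            VkPipelineLayout layout, VkDescriptorSet descriptorSet)
{
    vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
    vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout, 0, 1, &descriptorSet, 0, nullptr);
    // Vertex indices 0, 1, 2 yield UVs (0,0), (2,0), (0,2), which the vertex shader
    // maps to clip-space positions (-1,-1), (3,-1), (-1,3): one triangle covering the viewport.
    vkCmdDraw(cmd, 3, 1, 0, 0);
}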


@@ -0,0 +1,135 @@
/* Copyright (c) 2025, Sascha Willems
*
* SPDX-License-Identifier: MIT
*
*/
struct VSInput
{
float3 Pos;
float3 Normal;
};
struct VSOutput
{
float4 Pos : SV_POSITION;
float3 UVW;
float3 WorldPos;
float3 Normal;
float3 ViewVec;
float3 LightVec;
};
struct FSOutput
{
float4 Color0 : SV_TARGET0;
float4 Color1 : SV_TARGET1;
};
struct UBO {
float4x4 projection;
float4x4 modelview;
float4x4 inverseModelview;
float exposure;
};
ConstantBuffer<UBO> ubo;
SamplerCube samplerEnvMap;
[[SpecializationConstant]] const int objectType = 0;
[shader("vertex")]
VSOutput vertexMain(VSInput input)
{
VSOutput output;
output.UVW = input.Pos;
switch (objectType) {
case 0: // Skybox
output.WorldPos = mul((float4x3)ubo.modelview, input.Pos).xyz;
output.Pos = mul(ubo.projection, float4(output.WorldPos, 1.0));
break;
case 1: // Object
output.WorldPos = mul(ubo.modelview, float4(input.Pos, 1.0)).xyz;
output.Pos = mul(ubo.projection, mul(ubo.modelview, float4(input.Pos.xyz, 1.0)));
break;
}
output.WorldPos = mul(ubo.modelview, float4(input.Pos, 1.0)).xyz;
output.Normal = mul((float4x3)ubo.modelview, input.Normal).xyz;
float3 lightPos = float3(0.0f, -5.0f, 5.0f);
output.LightVec = lightPos.xyz - output.WorldPos.xyz;
output.ViewVec = -output.WorldPos.xyz;
return output;
}
[shader("fragment")]
FSOutput fragmentMain(VSOutput input)
{
FSOutput output;
float4 color;
float3 wcNormal;
switch (objectType) {
case 0: // Skybox
{
float3 normal = normalize(input.UVW);
color = samplerEnvMap.Sample(normal);
}
break;
case 1: // Reflect
{
float3 wViewVec = mul((float4x3)ubo.inverseModelview, normalize(input.ViewVec)).xyz;
float3 normal = normalize(input.Normal);
float3 wNormal = mul((float4x3)ubo.inverseModelview, normal).xyz;
float NdotL = max(dot(normal, input.LightVec), 0.0);
float3 eyeDir = normalize(input.ViewVec);
float3 halfVec = normalize(input.LightVec + eyeDir);
float NdotH = max(dot(normal, halfVec), 0.0);
float NdotV = max(dot(normal, eyeDir), 0.0);
float VdotH = max(dot(eyeDir, halfVec), 0.0);
// Geometric attenuation
float NH2 = 2.0 * NdotH;
float g1 = (NH2 * NdotV) / VdotH;
float g2 = (NH2 * NdotL) / VdotH;
float geoAtt = min(1.0, min(g1, g2));
const float F0 = 0.6;
const float k = 0.2;
// Fresnel (schlick approximation)
float fresnel = pow(1.0 - VdotH, 5.0);
fresnel *= (1.0 - F0);
fresnel += F0;
float spec = (fresnel * geoAtt) / (NdotV * NdotL * 3.14);
color = samplerEnvMap.Sample(reflect(-wViewVec, wNormal));
color = float4(color.rgb * NdotL * (k + spec * (1.0 - k)), 1.0);
}
break;
case 2: // Refract
{
float3 wViewVec = mul((float4x3)ubo.inverseModelview, normalize(input.ViewVec)).xyz;
float3 wNormal = mul((float4x3)ubo.inverseModelview, input.Normal).xyz;
color = samplerEnvMap.Sample(refract(-wViewVec, wNormal, 1.0/1.6));
}
break;
}
// Color with manual exposure into attachment 0
output.Color0.rgb = float3(1.0, 1.0, 1.0) - exp(-color.rgb * ubo.exposure);
// Also initialize alpha so the attachment doesn't receive an undefined value
output.Color0.a = 1.0;
// Bright parts for bloom into attachment 1
float l = dot(output.Color0.rgb, float3(0.2126, 0.7152, 0.0722));
float threshold = 0.75;
output.Color1.rgb = (l > threshold) ? output.Color0.rgb : float3(0.0, 0.0, 0.0);
output.Color1.a = 1.0;
return output;
}
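
The G-Buffer fragment shader above writes two color outputs (SV_TARGET0: exposure-adjusted scene, SV_TARGET1: bright pass that feeds the bloom blur), so the offscreen pipeline's color blend state needs one attachment entry per target. An illustrative sketch under that assumption, not taken from the sample's C++ code:

#include <vulkan/vulkan.h>
#include <array>

// Hypothetical helper: one blend-attachment state per render target, blending disabled.
// The caller keeps `attachments` alive for as long as the returned struct is used.
VkPipelineColorBlendStateCreateInfo makeGBufferBlendState(
    std::array<VkPipelineColorBlendAttachmentState, 2>& attachments)
{
    for (auto& att : attachments) {
        att = {};
        att.blendEnable = VK_FALSE;
        att.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |
                             VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
    }

    VkPipelineColorBlendStateCreateInfo blendState{};
    blendState.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    blendState.attachmentCount = static_cast<uint32_t>(attachments.size());
    blendState.pAttachments = attachments.data();
    return blendState;
}

The objectType specialization constant is handled the same way as LIGHTING_MODEL and dir above: the host side can create one pipeline each for the skybox, reflect and refract variants from the same gbuffer shader module.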