#include "../shaderenv.h"


/** Interpolated vertex outputs consumed by every fragment program here. */
struct fragment
{
	// normalized screen position
	float4 pos: WPOS;
	float2 texCoord: TEXCOORD0;
	float3 view: TEXCOORD1;
};


/** Single render-target output. */
struct pixel
{
	float4 color: COLOR0;
};


/** Reflect point pt around the plane through the origin with normal n.
*/
float2 myreflect(float2 pt, float2 n)
{
	// distance to plane
	float d = dot(n, pt);
	// reflect around plane
	float2 rpt = pt - d * 2.0f * n;

	return rpt;
}


/** Function for standard deferred shading:
	ambient + diffuse lighting using light 0 from the gl state.
*/
float4 shade(fragment IN,
			 uniform float4 color,
			 uniform float3 normal,
			 float3 lightDir)
{
	// diffuse intensity
	const float angle = saturate(dot(normal, lightDir));

	float4 lightDiffuse = glstate.light[0].diffuse;
	float4 diffuse = angle * lightDiffuse;

	// global ambient
	const float4 ambient = glstate.light[0].ambient;

	float4 outColor;

	// hack: prevent shading the sky (marked by a huge depth in color.w)
	if (color.w > 1e19f)
		outColor = color;
	else
		outColor = (ambient + diffuse) * color;

	return outColor;
}


/** The mrt shader for standard rendering.
*/
pixel main(fragment IN,
		   uniform sampler2D colors,
		   uniform sampler2D normals,
		   uniform float3 lightDir)
{
	pixel OUT;

	float4 norm = tex2D(normals, IN.texCoord);
	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));

	float3 normal = normalize(norm.xyz);

	float4 col = shade(IN, color, normal, lightDir);
	OUT.color = col;

	// pass the eye-space depth through unchanged
	// (a scaled view vector so we don't have to normalize for e.g. ssao
	// was tried here, see the disabled division)
	OUT.color.w = color.w;// / length(IN.view);

	return OUT;
}


/** Percentage-closer-filtered shadow term.
	Averages step(depth, shadowMapDepth) over NUM_PCF_TABS weighted sample
	offsets around lightSpacePos; returns the weighted average in [0, 1].
*/
float CalcShadowTerm(fragment IN,
					 uniform sampler2D shadowMap,
					 uniform float scale,
					 uniform float2 lightSpacePos,
					 uniform float depth,
					 uniform float2 samples[NUM_PCF_TABS],
					 uniform float weights[NUM_PCF_TABS],
					 uniform sampler2D noiseTexture)
{
	// single-tap version kept for reference:
	//float shadowDepth = tex2D(shadowMap, lightSpacePos).x;
	//return step(depth, shadowDepth);

	float total_d = .0f;
	float total_w = .0f;

	for (int i = 0; i < NUM_PCF_TABS; ++ i)
	{
		const float2 offset = samples[i];
		const float w = weights[i];

#if 1
		////////////////////
		//-- add random noise: reflect around random normal vector (warning: slow!)
		// NOTE(review): the noise texel is used directly as a reflection
		// normal; whether it is already signed/normalized depends on the
		// contents of noiseTexture -- confirm against its generator.

		float2 mynoise = tex2D(noiseTexture, IN.texCoord).xy;
		const float2 offsetTransformed = myreflect(offset, mynoise);
#else
		const float2 offsetTransformed = offset;
#endif
		// weight with projected coordinate to reach similar kernel size
		// for near and far
		float2 texcoord = lightSpacePos + offsetTransformed * scale;

		float shadowDepth = tex2D(shadowMap, texcoord).x;

		total_d += w * step(depth, shadowDepth);
		total_w += w;
	}

	total_d /= (float)total_w;

	return total_d;
}


/** Bilinear interpolation of the four corner values with weights w.
*/
inline float3 Interpol(float2 w, float3 bl, float3 br, float3 tl, float3 tr)
{
	float3 x1 = lerp(bl, tl, w.y);
	float3 x2 = lerp(br, tr, w.y);
	float3 v = lerp(x1, x2, w.x);

	return v;
}


/** Deferred shading pass with PCF shadow mapping.
	Reconstructs the world position from the stored eye-space depth,
	projects it into light space and attenuates the diffuse term by the
	shadow factor.
*/
pixel main_shadow(fragment IN,
				  uniform sampler2D colors,
				  uniform sampler2D positions,
				  uniform sampler2D normals,
				  uniform sampler2D shadowMap,
				  uniform float4x4 shadowMatrix,
				  uniform float sampleWidth,
				  uniform sampler2D noiseTex,
				  uniform float2 samples[NUM_PCF_TABS],
				  uniform float weights[NUM_PCF_TABS],
				  uniform float3 lightDir,
				  uniform float3 eyePos,
				  uniform float3 bl,
				  uniform float3 br,
				  uniform float3 tl,
				  uniform float3 tr)
{
	pixel OUT;

	float4 norm = tex2D(normals, IN.texCoord.xy);
	const float3 normal = normalize(norm.xyz);

	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));

	/// reconstruct position from the eye space depth
	float3 viewDir = IN.view;
	const float lenView = length(viewDir);
	viewDir /= lenView;

	const float eyeDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w;
	const float4 worldPos = float4(eyePos - viewDir * eyeDepth, 1);

	// diffuse intensity
	const float angle = saturate(dot(normal, lightDir));

	const float4 lightDiffuse = glstate.light[0].diffuse;
	float4 diffuse = lightDiffuse * angle;

	// hack: prevent shadowing the sky (marked by a huge depth in color.w)
	const bool useShading = (color.w < 1e19f);

	// calc diffuse illumination + shadow term
	if (useShading &&
		(angle > 1e-3f) // shadow only if diffuse color has some minimum intensity
		)
	{
		float4 lightSpacePos = mul(shadowMatrix, worldPos);
		lightSpacePos /= lightSpacePos.w;

		float shadowTerm =
			CalcShadowTerm(IN, shadowMap, sampleWidth,
						   lightSpacePos.xy, lightSpacePos.z,
						   samples, weights, noiseTex);

		diffuse *= shadowTerm;
	}

	// light ambient term
	const float4 ambient = glstate.light[0].ambient;

	// compute shading
	OUT.color = useShading ? (ambient + diffuse) * color : color;

	// store scaled view vector from now on so we don't have to
	// normalize later (e.g., for ssao) -- currently disabled:
	//OUT.color.w = color.w / lenView;

	return OUT;
}


#if 0

/** This shader computes the reprojection and stores reprojected
	color / depth values as well as a boolean that
	(disabled: references uniforms not declared in this file)
*/
pixel Reproject(fragment IN,
				uniform sampler2D colors,
				uniform sampler2D normals)
{
	float4 norm = tex2Dlod(normals, float4(IN.texCoord, 0 ,0));
	const float3 normal = normalize(norm.xyz);

	/// reconstruct position from the eye space depth
	float3 viewDir = IN.view;
	const float eyeDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w;
	const float3 eyeSpacePos = -viewDir * eyeDepth;
	const float4 worldPos = float4(eyePos + eyeSpacePos, 1.0f);

	////////////////
	//-- calculate the current projected position (also used for next frame)

	float4 currentPos = mul(modelViewProj, worldPos);

	const float w = SAMPLE_RADIUS / currentPos.w;
	currentPos /= currentPos.w;

	const float precisionScale = 1e-3f;
	const float currentDepth = currentPos.z * precisionScale;

	const float2 ao = ssao(IN, colors, noiseTex, samples, normal,
						   eyeSpacePos, w, bl, br, tl, tr, normalize(viewDir));

	/////////////////
	//-- compute temporally smoothing

	// reproject new frame into old one
	// calculate projected depth
	float4 projPos = mul(oldModelViewProj, worldPos);
	projPos /= projPos.w;

	// the current depth projected into the old frame
	const float projDepth = projPos.z * precisionScale;
	// fit from unit cube into 0 .. 1
	const float2 tex = projPos.xy * 0.5f + 0.5f;

	// retrieve the sample from the last frame
	float4 oldCol = tex2D(oldTex, tex);
	const float oldDepth = oldCol.z;

	//const float depthDif = 1.0f - projDepth / oldDepth;
	const float depthDif = projDepth - oldDepth;

	//const float oldNumSamples = oldCol.y;
	const float oldWeight = clamp(oldCol.y, 0, temporalCoherence);

	float newWeight;

	// the number of valid samples in this frame
	//const float newNumSamples = ao.y;

	if ((temporalCoherence > 0)
		&& (tex.x >= 0.0f) && (tex.x < 1.0f)
		&& (tex.y >= 0.0f) && (tex.y < 1.0f)
		&& (abs(depthDif) < MIN_DEPTH_DIFF)
		&& (abs(oldCol.x - ao.x) < 0.1f)
		// if visibility changed in the surrounding area we have to recompute
		//&& (oldNumSamples > 0.8f * newNumSamples)
		)
	{
		// increase the weight for convergence
		newWeight = oldWeight + 1.0f;
		OUT.illum_col.x = (ao.x + oldCol.x * oldWeight) / newWeight;
		//if (!(oldNumSamples > ao.y - 1.5f)) newWeight = 0;
	}
	else
	{
		OUT.illum_col.x = ao.x;
		newWeight = .0f;
	}

	OUT.illum_col.y = newWeight;
	OUT.illum_col.z = currentDepth;

	return OUT;
}

#endif


/** Pass-through: output the color buffer sample unchanged.
*/
float4 Output(fragment IN, uniform sampler2D colors): COLOR
{
	return tex2Dlod(colors, float4(IN.texCoord, 0, 0));
}


/** Rescale the stored depth by the view-vector length
	so we don't have to normalize later (e.g., for ssao).
*/
float4 ScaleDepth(fragment IN, uniform sampler2D colors): COLOR
{
	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));
	color.w /= length(IN.view);

	return color;
}


/** Downsample pass: a single fetch, letting bilinear filtering do its work.
	NOTE(review): downSampleOffs is currently unused -- presumably kept for
	a multi-tap variant; confirm with the CPU-side setup.
*/
float4 DownSample(fragment IN,
				  uniform sampler2D colors,
				  uniform float2 downSampleOffs[NUM_DOWNSAMPLES]): COLOR
{
	// let bilinear filtering do its work
	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));
	return color;
}