#include "../shaderenv.h" //////////////////// // Screen Spaced Ambient Occlusion shader // based on shader of Alexander Kusternig #define USE_EYE_SPACE_DEPTH 1 struct fragment { // normalized screen position float4 pos: WPOS; float2 texCoord: TEXCOORD0; float3 view: TEXCOORD1; }; struct pixel { float4 illum_col: COLOR0; }; inline float2 myreflect(float2 pt, float2 n) { // distance to plane float d = dot(n, pt); // reflect around plane float2 rpt = pt - d * 2.0f * n; return rpt; } inline float3 Interpol(float2 w, float3 bl, float3 br, float3 tl, float3 tr) { float3 x1 = lerp(bl, tl, w.y); float3 x2 = lerp(br, tr, w.y); float3 v = lerp(x1, x2, w.x); return v; } // reconstruct world space position inline float3 ReconstructSamplePosition(float3 eyePos, uniform sampler2D colors, float2 texcoord, float3 bl, float3 br, float3 tl, float3 tr) { #if USE_EYE_SPACE_DEPTH float eyeSpaceDepth = tex2Dlod(colors, float4(texcoord, 0, 0)).w; //float3 rotView = normalize(Interpol(texcoord, bl, br, tl, tr)); float3 rotView = Interpol(texcoord, bl, br, tl, tr); float3 samplePos = eyePos - rotView * eyeSpaceDepth; #else float3 samplePos = tex2Dlod(colors, float4(texcoord, 0, SSAO_MIPMAP_LEVEL)).xyz; #endif return samplePos; } /** The ssao shader returning the an intensity value between 0 and 1 */ float2 ssao(fragment IN, uniform sampler2D colors, uniform sampler2D noiseTexture, uniform float2 samples[NUM_SAMPLES], uniform float3 currentNormal, uniform float3 centerPosition, uniform float scaleFactor, uniform float3 eyePos, uniform float3 bl, uniform float3 br, uniform float3 tl, uniform float3 tr //, uniform float3 viewDir ) { // Check in a circular area around the current position. // Shoot vectors to the positions there, and check the angle to these positions. // Summing up these angles gives an estimation of the occlusion at the current position. float total_ao = 0.0; float numSamples = 0; for (int i = 0; i < NUM_SAMPLES; ++ i) { const float2 offset = samples[i]; #if 1 //////////////////// // add random noise: reflect around random normal vector (warning: slow!) float2 mynoise = tex2D(noiseTexture, IN.texCoord.xy).xy; const float2 offsetTransformed = myreflect(offset, mynoise); #else const float2 offsetTransformed = offset; #endif // weight with projected coordinate to reach similar kernel size for near and far float2 texcoord = IN.texCoord.xy + offsetTransformed * AREA_SIZE * scaleFactor; //if ((texcoord.x <= 1.0f) && (texcoord.x >= 0.0f) && (texcoord.y <= 1.0f) && (texcoord.y >= 0.0f))++ numSamples; float3 samplePos = ReconstructSamplePosition(eyePos, colors, texcoord, bl, br, tl, tr); /////// //-- compute contribution of current sample taking into account direction and angle float3 dirSample = samplePos - centerPosition.xyz; const float lengthSample = length(dirSample); float3 nDirSample = dirSample / lengthSample; // angle between current normal and direction to sample controls AO intensity. const float cos_angle = max(dot(nDirSample, currentNormal), 0.0f); // the distance_scale offset is used to avoid singularity that occurs at global illumination when // the distance to a sample approaches zero const float intensity = (SAMPLE_INTENSITY * DISTANCE_SCALE) / (DISTANCE_SCALE + lengthSample * lengthSample); #if 0 // if surface normal perpenticular to view dir, approx. half of the samples will not count // => compensate for this (on the other hand, projected sampling area could be larger!) 
const float viewCorrection = 1.0f + VIEW_CORRECTION_SCALE * (1.0f - dot(currentViewDir, currentNormal)); total_ao += cos_angle * intensity * viewCorrection; #endif total_ao += cos_angle * intensity; } return float2(max(0.0f, 1.0f - total_ao), numSamples); } /** The mrt shader for screen space ambient occlusion */ pixel main(fragment IN, uniform sampler2D colors, uniform sampler2D positions, uniform sampler2D normals, uniform sampler2D noiseTexture, uniform float2 samples[NUM_SAMPLES], uniform sampler2D oldTex, const uniform float4x4 oldModelViewProj, const uniform float4x4 modelViewProj, uniform float maxDepth, uniform float temporalCoherence, uniform float3 eyePos, uniform float3 bl, uniform float3 br, uniform float3 tl, uniform float3 tr ) { pixel OUT; float4 norm = tex2Dlod(normals, float4(IN.texCoord, 0 ,0)); float3 normal = normalize(norm.xyz); // the w coordinate from the persp. projection float w = norm.w; #if USE_EYE_SPACE_DEPTH /// reconstruct position from the eye space depth float3 viewDir = IN.view; const float eyeDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w; float3 centerPosition; centerPosition.xyz = eyePos - viewDir * eyeDepth; const float2 ao = ssao(IN, colors, noiseTexture, samples, normal, centerPosition, w, eyePos, bl, br, tl, tr); #else // the current world position const float3 centerPosition = tex2Dlod(positions, float4(IN.texCoord, 0, 0)).xyz; const float2 ao = ssao(IN, positions, noiseTexture, samples, normal, centerPosition, w, eyePos, bl, br, tl, tr); #endif ///////////////// //-- compute temporally smoothing float4 realPos = float4(centerPosition * maxDepth, 1.0f); // calculcate the current projected depth for next frame float4 currentPos = mul(modelViewProj, realPos); currentPos /= currentPos.w; const float currentDepth = currentPos.z; /////////// //-- reprojection new frame into old one // calculate projected depth float4 projPos = mul(oldModelViewProj, realPos); projPos /= projPos.w; // the current depth projected into the old frame const float projDepth = projPos.z; // fit from unit cube into 0 .. 1 float2 tex = (projPos.xy) * 0.5f + 0.5f; // retrieve the sample from the last frame float4 oldCol = tex2D(oldTex, tex); const float oldDepth = oldCol.w; const float depthDif = 1.0f - projDepth / oldDepth; //const float oldNumSamples = oldCol.y; const float oldWeight = clamp(oldCol.z, 0, temporalCoherence); float newWeight; // the number of valid samples in this frame //const float newNumSamples = ao.y; if (//(temporalCoherence > 0) && (tex.x >= 0.0f) && (tex.x < 1.0f) && (tex.y >= 0.0f) && (tex.y < 1.0f) && (abs(depthDif) < MIN_DEPTH_DIFF) // if visibility changed in the surrounding area we have to recompute //&& (oldNumSamples > 0.8f * newNumSamples) ) { // increase the weight for convergence newWeight = oldWeight + 1.0f; OUT.illum_col.xy = (ao.xy + oldCol.xy * oldWeight) / newWeight; //if (!(oldNumSamples > ao.y - 1.5f)) newWeight = 0; } else { OUT.illum_col.xy = ao.xy; newWeight = 0; } OUT.illum_col.z = newWeight; OUT.illum_col.w = currentDepth; return OUT; } pixel combine(fragment IN, uniform sampler2D colors, uniform sampler2D ssaoTex) { pixel OUT; float4 col = tex2Dlod(colors, float4(IN.texCoord, 0, 0)); float4 ao = tex2Dlod(ssaoTex, float4(IN.texCoord, 0, 0)); OUT.illum_col = col * ao.x; //OUT.illum_col = float4(ao.x,ao.x,ao.x, ao.w); OUT.illum_col.w = col.w; return OUT; }
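

/*
    Illustrative sketch only: the helper below is not referenced by the shaders
    above, and its name and signature are assumptions added for clarity. It
    restates the temporal smoothing performed inline in main() as an incremental
    average, ao_n = (ao + ao_{n-1} * w_{n-1}) / (w_{n-1} + 1), where the history
    weight is clamped to temporalCoherence so that old samples are eventually
    replaced and the history cannot dominate after a visibility change.
*/
inline float2 TemporalBlend(float2 currentAo,
                            float2 oldAo,
                            float oldWeight,
                            float temporalCoherence,
                            out float newWeight)
{
    // clamp the history weight, as main() does before blending
    const float w = clamp(oldWeight, 0.0f, temporalCoherence);
    // increase the weight for convergence and blend the new result with the history
    newWeight = w + 1.0f;

    return (currentAo + oldAo * w) / newWeight;
}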