#include "../shaderenv.h" //////////////////// // Screen Spaced Ambient Occlusion shader // based on shader of Alexander Kusternig struct fragment { // normalized screen position float4 pos: WPOS; float2 texCoord: TEXCOORD0; float3 view: TEXCOORD1; }; struct pixel { float4 illum_col: COLOR0; }; float3 myreflect(float3 pt, float3 n) { // distance to plane float d = dot(n, pt); // reflect around plane float3 rpt = pt - d * 2.0f * n; return rpt; } /** The ssao shader returning the an intensity value between 0 and 1 */ float ssao(fragment IN, uniform sampler2D positions, uniform sampler2D noiseTexture, uniform float3 samples[NUM_SAMPLES], uniform float3 currentNormal, uniform float3 currentViewDir, uniform float noiseMultiplier, uniform float4 centerPosition, const uniform float4x4 modelViewProj, const float4 realPos ) { // the w coordinate from the persp. projection const float w = centerPosition.w; // Check in a circular area around the current position. // Shoot vectors to the positions there, and check the angle to these positions. // Summing up these angles gives an estimation of the occlusion at the current position. float total_ao = 0.0; float j = 0.0f; for (int i = 0; i < NUM_SAMPLES; ++ i) { const float3 offset = samples[i] * AREA_SIZE; //////////////////// // add random noise: r stores costheta, g stores sintheta float3 mynoise = (float3)tex2D(noiseTexture, IN.texCoord.xy * noiseMultiplier); const float3 offsetTransformed = reflect(offset, mynoise); //const float3 offsetTransformed = offset; // compute position const float4 offsetPos = float4(offsetTransformed + realPos.xyz, 1.0f); const float4 projPos = mul(modelViewProj, offsetPos); const float2 texcoord = projPos.xy / projPos.w; if ((texcoord.x <= 1.0f) || (texcoord.x >= 0.0f) || (texcoord.y <= 1.0f) || (texcoord.y >= 0.0f)) { ++ j; // sample downsampled texture in order to speed up texture accesses float3 sample_position = tex2Dlod(positions, float4(texcoord, 0, 1)).xyz; //float3 sample_position = tex2D(positions, texcoord).xyz; float3 vector_to_sample = sample_position - centerPosition.xyz; const float length_to_sample = length(vector_to_sample); float3 direction_to_sample = vector_to_sample / length_to_sample; // angle between current normal and direction to sample controls AO intensity. const float cos_angle = max(dot(direction_to_sample, currentNormal), 0.0f); // distance between current position and sample position controls AO intensity. const float distance_intensity = (SAMPLE_INTENSITY * DISTANCE_SCALE) / (DISTANCE_SCALE + length_to_sample * length_to_sample); // if surface normal perpenticular to view dir, approx. half of the samples will not count // => compensate for this (on the other hand, projected sampling area could be larger!) 
            //const float view_correction = 1.0f + VIEW_CORRECTION_SCALE * (1.0f - dot(currentViewDir, currentNormal));
            //total_ao += cos_angle * distance_intensity * view_correction;

            total_ao += cos_angle * distance_intensity;
        }
    }

    // avoid division by zero if no sample projected onto the screen
    total_ao /= max(j, 1.0f);

    return max(0.0f, 1.0f - total_ao);
    //return saturate(dot(currentViewDir, currentNormal));
}


/** The mrt shader for screen space ambient occlusion
*/
pixel main(fragment IN,
           uniform sampler2D colors,
           uniform sampler2D positions,
           uniform sampler2D normals,
           uniform sampler2D noiseTexture,
           uniform float3 samples[NUM_SAMPLES],
           uniform float noiseMultiplier,
           uniform sampler2D oldTex,
           const uniform float4x4 oldModelViewProj,
           const uniform float4x4 mymodelViewProj,
           uniform float maxDepth,
           uniform float temporalCoherence
           )
{
    pixel OUT;

    float4 norm = tex2D(normals, IN.texCoord.xy);

    // the ambient term
    const float amb = norm.w;

    // expand normal
    float3 normal = normalize(norm.xyz);

    /// the current view direction (currently unused: view correction is disabled)
    float3 viewDir;// = normalize(IN.view);

    // the current world position
    const float4 centerPosition = tex2D(positions, IN.texCoord.xy);

    // the current color
    const float4 currentCol = tex2D(colors, IN.texCoord.xy);
    const float currentDepth = currentCol.w;

    float4 realPos = centerPosition * maxDepth;
    realPos.w = 1.0f;

    const float ao = ssao(IN, positions, noiseTexture, samples, normal, viewDir,
                          noiseMultiplier, centerPosition, mymodelViewProj, realPos);
    //const float ao = ssao(IN, positions, samples, normal, viewDir, centerPosition, mymodelViewProj, realPos);


    /////////////////
    //-- compute temporal smoothing

    // reproject the current position into the previous frame
    float4 oldPos = mul(oldModelViewProj, realPos);

    const float newDepth = oldPos.z / oldPos.w;

    float2 tex = (oldPos.xy / oldPos.w) * 0.5f + 0.5f;
    float4 oldCol = tex2D(oldTex, tex);

    const float oldDepth = oldCol.w;
    const float depthDif = 1.0f - newDepth / oldDepth;

    // the number of frames already accumulated (capped by temporalCoherence)
    float oldWeight = clamp(oldCol.z, 0, temporalCoherence);
    float newWeight;

    // reuse the old ssao value only if the reprojected position lies on screen
    // and the depths match, i.e., no disocclusion happened
    if ((temporalCoherence > 0) &&
        (tex.x >= 0.0f) && (tex.x < 1.0f) &&
        (tex.y >= 0.0f) && (tex.y < 1.0f) &&
        (abs(depthDif) < 1e-3f))
    {
        newWeight = oldWeight + 1;

        // running average of the ao values over the accumulated frames
        //OUT.illum_col = (float4)ao * expFactor + oldCol.x * (1.0f - expFactor);
        OUT.illum_col = (float4)(ao + oldCol.x * oldWeight) / newWeight;
    }
    else
    {
        OUT.illum_col = (float4)ao;
        newWeight = 0;
    }

    OUT.illum_col.z = newWeight;
    OUT.illum_col.w = currentDepth;

    return OUT;
}


pixel combine(fragment IN,
              uniform sampler2D colors,
              uniform sampler2D ssaoTex)
{
    pixel OUT;

    float4 col = tex2Dlod(colors, float4(IN.texCoord.xy, 0, 0));
    float4 ao = tex2D(ssaoTex, IN.texCoord.xy);

    //OUT.illum_col = col * ao.x;
    OUT.illum_col = (float4)ao.x;
    OUT.illum_col.w = ao.w;

    return OUT;
}