// Screen-space ambient occlusion (SSAO) pixel shader.
//
// For each shaded pixel, 32 precomputed offset vectors probe the linear depth
// buffer around the pixel's position; probes that land behind the stored depth
// contribute occlusion.  A per-pixel random-vector texture mirrors the sample
// kernel across a random plane, turning banding artifacts into dither noise.

// Interpolated inputs from the rasterizer.
struct fragment
{
    float4 pos:      WPOS;      // window-space position (pixel coordinates)
    float4 view:     COLOR;     // encoded view vector (not used by the active path)
    float4 texcoord: TEXCOORD0; // normalized screen position
    float4 lindepth: TEXCOORD1; // linear depth from the vertex stage (unused here)
};

// Shader output.
struct pixel
{
    float4 col: COLOR0; // occlusion term: 1 = fully visible, 0 = fully occluded
};

pixel main(fragment IN,
           uniform sampler2D lindepth,  // linear eye-space depth buffer
           uniform sampler2D scene,     // shaded scene color (debug combine only)
           uniform sampler2D normal,    // random vectors in [0..1], dither source
           uniform float invTexSize,    // 1 / texture size: pixel coords -> texcoords
           uniform float radius,        // sampling radius
           uniform float3 eyevec,       // unused by the active path
           uniform float3 samples[32])  // precomputed sample kernel offsets
{
    pixel OUT;

    // Depth of the current pixel from the linear-depth buffer.
    float eyez = tex2D(lindepth, IN.texcoord.xy).x;

    // Sample-space point for the current pixel: texture-space xy paired with
    // the linear depth.  (A true eye-space reconstruction from IN.view was
    // experimented with in an earlier revision but is not active.)
    float3 eyept = float3(IN.texcoord.xy, eyez);

    // Random mirror plane from the dither texture, remapped [0..1] -> [-1..1].
    // Indexed in pixel coordinates so the noise pattern varies per pixel.
    float4 pl = tex2D(normal, IN.pos.xy * invTexSize);
    pl = pl * 2.0f - float4(1.0f, 1.0f, 1.0f, 1.0f);

    float occlusion = 0.0f;

    // Gather occlusion from the surrounding sample kernel.
    for (int i = 0; i < 32; i++)
    {
        // Dither the kernel by mirroring each sample across the random plane.
        // Cg's built-in reflect() computes v - 2 * dot(n, v) * n, which is
        // exactly the mirroring required here.
        float3 samplepos = eyept + radius * reflect(samples[i], pl.xyz);

        // Project the sample back to texture space and re-center on the pixel.
        float2 sampletex = samplepos.xy / samplepos.z;
        float2 sampletexn = sampletex + float2(0.5f, 0.5f);

        // Depth stored at the sample's screen location.
        float depth = tex2D(lindepth, sampletexn).x;

        // Positive when the sample lies behind the stored surface, i.e. the
        // surface occludes it.  50.0 is an empirical scale controlling how
        // quickly the occlusion contribution falls off with separation.
        float zdist = 50.0f * max(samplepos.z - depth, 0.0f);

        // Contribution shrinks quadratically with distance to the occluder,
        // so distant geometry barely darkens the pixel.
        occlusion += 1.0f / (1.0f + zdist * zdist);
    }

    // Average over the kernel and invert: 1 = unoccluded.
    occlusion /= 32.0f;
    OUT.col = 1.0f - occlusion;
    // Debug variant combining with the scene color:
    // OUT.col = tex2D(scene, IN.texcoord.xy) * 0.5 + (1.0f - occlusion);

    return OUT;
}