source: GTP/trunk/App/Demos/Vis/FriendlyCulling/src/shaders/ssao.cg @ 3204

Revision 3204, 12.8 KB checked in by mattausch, 16 years ago

debug version showing a visualization of the confidence

#include "../shaderenv.h"


////////////////////
// Screen Space Ambient Occlusion shader
// based on a shader by Alexander Kusternig


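// if 1, the temporal reprojection compares linear eye space depths;
// otherwise it compares post-projection depth values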
#define USE_EYESPACE_DEPTH 1


struct fragment
{
        float2 texCoord: TEXCOORD0;
        float3 view: TEXCOORD1;
};


struct pixel
{
        float4 illum_col: COLOR0;
};


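// solid angle of the cone subtended by a sphere of the given radius at the
// given distance: 2 * pi * (1 - cos(asin(radius / dist)))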
inline float occlusionPower(float radius, float dist)
{
        return 6.283185307179586476925286766559f * (1.0f - cos(asin(radius / dist)));
}


inline float SqrLen(float3 v)
{
        return v.x * v.x + v.y * v.y + v.z * v.z;
}


inline float2 myreflect(float2 pt, float2 n)
{
        // distance to plane
        float d = dot(n, pt);
        // reflect around plane
        float2 rpt = pt - d * 2.0f * n;

        return rpt;
}


inline float3 Interpol(float2 w, float3 bl, float3 br, float3 tl, float3 tr)
{
        float3 x1 = lerp(bl, tl, w.y);
        float3 x2 = lerp(br, tr, w.y);
        float3 v = lerp(x1, x2, w.x);

        return v;
}


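// bl, br, tl, tr are presumably the view vectors through the bottom-left, bottom-right,
// top-left and top-right corners of the view frustum; Interpol() above derives the
// per-pixel view ray from them by bilinear interpolation.
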
// reconstruct the sample position from the linear eye space depth
// and the interpolated view ray
inline float3 ReconstructSamplePos(float eyeSpaceDepth,
                                   float2 texcoord,
                                   float3 bl, float3 br, float3 tl, float3 tr)
{
        float3 viewVec = Interpol(texcoord, bl, br, tl, tr);
        float3 samplePos = -viewVec * eyeSpaceDepth;

        return samplePos;
}



/** This function computes the reprojection and returns the ssao value
        of the old pixel together with the weight that this pixel receives
        in the new frame.
*/
inline float2 temporalSmoothing(float4 worldPos,
                                float eyeSpaceDepth,
                                float2 texcoord0,
                                float3 oldEyePos,
                                sampler2D oldTex,
                                float4x4 oldModelViewProj,
                                sampler2D colors,
                                float3 projPos,
                                float invW,
                                float3 oldbl,
                                float3 oldbr,
                                float3 oldtl,
                                float3 oldtr,
                                float3 diffVec
                                )
{
        // compute the position as seen from the old frame: add the offset of dynamic
        // objects (diffVec) and account for the camera translation via the old eye position
        const float3 translatedPos = diffVec - oldEyePos + worldPos.xyz;


        /////////////////
        //-- reproject into the old frame and calculate the texture position of the sample there

        // note: the old model view matrix only holds the view orientation part
        float4 backProjPos = mul(oldModelViewProj, float4(translatedPos, 1.0f));
        backProjPos /= backProjPos.w;

        // map from normalized device coordinates [-1, 1] into texture range [0, 1]
        const float2 oldTexCoords = backProjPos.xy * 0.5f + 0.5f;
        // retrieve the sample from the last frame
        const float4 oldPixel = tex2Dlod(oldTex, float4(oldTexCoords, .0f, .0f));

        // the ssao value in the old frame
        const float ssao = oldPixel.x;

#if USE_EYESPACE_DEPTH

        // the eye space depth of the sample stored in the old frame
        const float oldEyeSpaceDepth = oldPixel.w;

        // vector from the eye position to the old sample
        const float3 viewVec = Interpol(oldTexCoords, oldbl, oldbr, oldtl, oldtr);
        const float invLen = 1.0f / length(viewVec);
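        // convert the distance to the reprojected point into the same linear depth
        // measure that was stored in the old frame (distance divided by the view ray length)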
        const float projectedEyeSpaceDepth = invLen * length(translatedPos);
        //const float projectedEyeSpaceDepth = length(translatedPos);

        const float depthDif = abs(1.0f - oldEyeSpaceDepth / projectedEyeSpaceDepth);

#else

        // the depth of the sample stored in the old frame
        const float oldDepth = oldPixel.w;
        // the depth projected into the old frame
        const float projectedDepth = projPos.z;
        // calculate the depth difference
        const float depthDif = abs(projectedDepth - oldDepth);

#endif

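        // one pixel border at the assumed 1024x768 render target resolution; used below
        // to reject samples that were reprojected outside the old view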
        const float xOffs = 1.0f / 1024.0f;
        const float yOffs = 1.0f / 768.0f;
        const float eps = 1e-6f;

        // the weight of the old value
        float w;

        //////////////
        //-- reuse the old value only if it was still valid in the old frame

        if (1
                && (oldTexCoords.x + eps >= xOffs) && (oldTexCoords.x <= 1.0f - xOffs + eps)
                && (oldTexCoords.y + eps >= yOffs) && (oldTexCoords.y <= 1.0f - yOffs + eps)
                && (depthDif <= MIN_DEPTH_DIFF)
                )
        {
                // pixel valid => retrieve the convergence weight
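                // debug: a constant weight is used here (instead of the stored convergence
                // weight in oldPixel.y) to visualize the confidence of the reprojection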
                w = 10.0f; //oldPixel.y;
        }
        else
        {
                w = 0.0f;
        }

        return float2(ssao, w);
}


/** The ssao shader returning an intensity value between 0 and 1.
        This version of the ssao shader uses the dot product between the pixel
        normal and the sample normal as weight.
*/
float3 ssao2(fragment IN,
             sampler2D colors,
             sampler2D noiseTex,
             float2 samples[NUM_SAMPLES],
             float3 normal,
             float3 centerPosition,
             float scaleFactor,
             float3 bl,
             float3 br,
             float3 tl,
             float3 tr,
             float3 viewDir,
             sampler2D normalTex
             )
{
        float total_ao = .0f;
        float numSamples = .0f;
        float validSamples = .0f;

        for (int i = 0; i < NUM_SAMPLES; ++ i)
        {
                const float2 offset = samples[i];

#if 1
                ////////////////////
                //-- add random noise: reflect around a random normal vector (rather slow!)

                const float2 mynoise = tex2Dlod(noiseTex, float4(IN.texCoord * 4.0f, 0, 0)).xy;
                const float2 offsetTransformed = myreflect(offset, mynoise);
#else
                const float2 offsetTransformed = offset;
#endif
                // scale by the projected coordinate so that the kernel size is similar for near and far
                //const float2 texcoord = IN.texCoord.xy + offsetTransformed * scaleFactor + jitter;
                const float2 texcoord = IN.texCoord.xy + offsetTransformed * scaleFactor;

                //if ((texcoord.x <= 1.0f) && (texcoord.x >= 0.0f) && (texcoord.y <= 1.0f) && (texcoord.y >= 0.0f)) ++ numSamples;
                float4 sampleColor = tex2Dlod(colors, float4(texcoord, 0, 0));

                const float3 samplePos = ReconstructSamplePos(sampleColor.w, texcoord, bl, br, tl, tr);
                // the normal of the current sample
                const float3 sampleNormal = tex2Dlod(normalTex, float4(texcoord, 0, 0)).xyz;


                ////////////////
                //-- compute the contribution of the sample using direction and angle

                float3 dirSample = samplePos - centerPosition;

                const float sqrLen = max(SqrLen(dirSample), 1e-2f);
                const float lengthToSample = sqrt(sqrLen);
                //const float lengthToSample = max(length(dirSample), 1e-6f);

                dirSample /= lengthToSample; // normalize

                // angle between the current normal and the direction to the sample controls the AO intensity
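                // remap dot(sampleNormal, -normal) from [-1, 1] to [0, 1]:
                // a sample whose normal faces the shaded point receives full weight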
                float cosAngle = .5f + dot(sampleNormal, -normal) * 0.5f;
                // use a binary decision to cull samples that are behind the current shading point
                cosAngle *= step(0.0f, dot(dirSample, normal));

                const float aoContrib = SAMPLE_INTENSITY / sqrLen;
                //const float aoContrib = (1.0f > lengthToSample) ? occlusionPower(9e-2f, DISTANCE_SCALE + lengthToSample): .0f;

#if 1
                // if the surface normal is perpendicular to the view dir, approx. half of the samples will not count
                // => compensate for this (on the other hand, the projected sampling area could be larger!)

                const float viewCorrection = 1.0f + VIEW_CORRECTION_SCALE * max(dot(viewDir, normal), 0.0f);
                total_ao += cosAngle * aoContrib * viewCorrection;
#else
                total_ao += cosAngle * aoContrib;
#endif
                // check if the samples have been valid in the last frame
                validSamples += (1.0f - step(1.0f, lengthToSample)) * sampleColor.x;

                ++ numSamples;
        }

        total_ao /= numSamples;

        return float3(max(0.0f, 1.0f - total_ao), validSamples, numSamples);
}


/** The ssao shader returning an intensity value between 0 and 1.
        This version of the ssao shader uses the dot product between the
        pixel-to-sample direction and the sample normal as weight.

        The algorithm works as follows:
        1) Check a circular area around the current position.
        2) Shoot vectors to the positions there and check the angle to these positions.
        3) Summing up these angles gives an estimate of the occlusion at the current position.
*/
float3 ssao(fragment IN,
            sampler2D colors,
            sampler2D noiseTex,
            float2 samples[NUM_SAMPLES],
            float3 normal,
            float3 centerPosition,
            float scaleFactor,
            float3 bl,
            float3 br,
            float3 tl,
            float3 tr,
            float3 viewDir,
            float newWeight
            )
{
        float total_ao = .0f;
        float validSamples = .0f;
        float numSamples = .0f;

        for (int i = 0; i < NUM_SAMPLES; ++ i)
        {
                const float2 offset = samples[i];

#if 1
                ////////////////////
                //-- add random noise: reflect around a random normal vector
                //-- (slows down the computation for some reason!)

                float2 mynoise = tex2Dlod(noiseTex, float4(IN.texCoord * 4.0f, 0, 0)).xy;
                const float2 offsetTransformed = myreflect(offset, mynoise);
#else
                const float2 offsetTransformed = offset;
#endif
                // scale by the projected coordinate so that the kernel size is similar for near and far
                const float2 texcoord = IN.texCoord.xy + offsetTransformed * scaleFactor;

                const float4 sampleColor = tex2Dlod(colors, float4(texcoord, .0f, .0f));
                const float3 samplePos = ReconstructSamplePos(sampleColor.w, texcoord, bl, br, tl, tr);


                ////////////////
                //-- compute the contribution of the sample using direction and angle

                float3 dirSample = samplePos - centerPosition;

                const float sqrLen = max(SqrLen(dirSample), 1e-2f);
                const float lengthToSample = sqrt(sqrLen);

                dirSample /= lengthToSample; // normalize

                // angle between the current normal and the direction to the sample controls the AO intensity
                const float cosAngle = max(dot(dirSample, normal), .0f);
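                // the ao contribution falls off with the squared distance to the sample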
                const float aoContrib = SAMPLE_INTENSITY / sqrLen;
                //const float aoContrib = (1.0f > lengthToSample) ? occlusionPower(9e-2f, DISTANCE_SCALE + lengthToSample): .0f;

#if 1
                // if the surface normal is perpendicular to the view dir, approx. half of the samples will not count
                // => compensate for this (on the other hand, the projected sampling area could be larger!)

                const float viewCorrection = 1.0f + VIEW_CORRECTION_SCALE * max(dot(viewDir, normal), 0.0f);
                total_ao += cosAngle * aoContrib * viewCorrection;
#else
                total_ao += cosAngle * aoContrib;
#endif

                // check if the samples have been valid in the last frame
                // hack: the distance measure can fail in some cases => choose something different
                const float tooFarAway = step(1.0f, lengthToSample);
                validSamples = max(validSamples, (1.0f - tooFarAway) * sampleColor.x);

                //validSamples += sampleColor.x;

                ++ numSamples;

                //if ((validSamples < 1.0f) && (newWeight > 200) && (numSamples >= 8)) break;
                //if ((validSamples < 1.0f) && (numSamples >= 8)) break;
        }

        total_ao /= numSamples;

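        // pack the result: x = ao intensity, y = sample validity (see above), z = number of samples taken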
        return float3(max(0.0f, 1.0f - total_ao), validSamples, numSamples);
}



/** The MRT shader for screen space ambient occlusion.
*/
pixel main(fragment IN,
           uniform sampler2D colors,
           uniform sampler2D normals,
           uniform sampler2D noiseTex,
           uniform float2 samples[NUM_SAMPLES],
           uniform sampler2D oldTex,
           uniform float4x4 modelViewProj,
           uniform float4x4 oldModelViewProj,
           uniform float temporalCoherence,
           uniform float3 bl,
           uniform float3 br,
           uniform float3 tl,
           uniform float3 tr,
           uniform float3 oldEyePos,
           uniform float3 oldbl,
           uniform float3 oldbr,
           uniform float3 oldtl,
           uniform float3 oldtr,
           uniform sampler2D attribsTex
           )
{
        pixel OUT;

        //const float3 normal = normalize(tex2Dlod(normals, float4(IN.texCoord, 0, 0)).xyz);
        const float3 normal = tex2Dlod(normals, float4(IN.texCoord, 0, 0)).xyz;

        // reconstruct the position from the eye space depth
        const float3 viewDir = IN.view;
        const float eyeSpaceDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w;
        const float4 eyeSpacePos = float4(-viewDir * eyeSpaceDepth, 1.0f);

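        // fetch the per-pixel motion attribute; diffVec presumably holds the offset of
        // dynamic objects since the last frame and is applied during the reprojection below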
        float3 diffVec = tex2Dlod(attribsTex, float4(IN.texCoord, 0, 0)).xyz;


        ////////////////
        //-- calculate the current projected position (also used for the next frame)

        float4 projPos = mul(modelViewProj, eyeSpacePos);
        const float invw = 1.0f / projPos.w;
        projPos *= invw;
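        // scale the kernel radius by 1 / w so that the screen space kernel shrinks with
        // distance and roughly corresponds to a constant world space sampling radius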
        float scaleFactor = SAMPLE_RADIUS * invw;


        /////////////////
        //-- compute the temporal reprojection

        float2 temporalVals = temporalSmoothing(eyeSpacePos, eyeSpaceDepth, IN.texCoord, oldEyePos,
                                                oldTex, oldModelViewProj,
                                                colors,
                                                projPos.xyz,
                                                invw,
                                                oldbl, oldbr, oldtl, oldtr,
                                                diffVec
                                                );

        const float oldSsao = temporalVals.x;
        float oldWeight = temporalVals.y;

        float3 ao;

        // cull the background (note: this should be done with the stencil buffer)
        if (eyeSpaceDepth < 1e10f)
        {
                ao = ssao(IN, colors, noiseTex, samples, normal, eyeSpacePos.xyz, scaleFactor, bl, br, tl, tr, normalize(viewDir), oldWeight);
                //ao = ssao2(IN, colors, noiseTex, samples, normal, eyeSpacePos.xyz, scaleFactor, bl, br, tl, tr, normalize(viewDir), normals);
        }
        else
        {
                ao = float3(1.0f, 1.0f, 1.0f);
        }

        const float squaredLen = SqrLen(diffVec);
/*
        if (ao.y > 4.0f) oldWeight = 0;
        else if ((ao.y > 1.0f) && (squaredLen < DYNAMIC_OBJECTS_THRESHOLD))
                oldWeight = min(oldWeight, 4.0f * NUM_SAMPLES);
*/
        const float newWeight = ao.z;

        // blend between the old and the new samples (and avoid division by zero)
        OUT.illum_col.x = (ao.x * newWeight + oldSsao * oldWeight) / max(1e-6f, (newWeight + oldWeight));
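        // debug: write out the old weight so that the confidence of the reprojection can be visualized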
        OUT.illum_col.y = oldWeight; //clamp(newWeight + oldWeight, .0f, temporalCoherence);

        OUT.illum_col.z = squaredLen;
        OUT.illum_col.w = eyeSpaceDepth;

        return OUT;
}