[2884] | 1 | #include "../shaderenv.h"
|
---|
[3227] | 2 | #include "common.h"
|
---|
[2884] | 3 |
|
---|
[2881] | 4 | ////////////////////
|
---|
| 5 | // Screen Space Ambient Occlusion shader
|
---|
| 6 | // based on shader of Alexander Kusternig
|
---|
| 7 |
|
---|
[3144] | 8 |
|
---|
[3106] | 9 | #define USE_EYESPACE_DEPTH 1
|
---|
[3105] | 10 |
|
---|
| 11 |
|
---|
// Interpolated vertex-to-pixel data for the full-screen SSAO pass.
struct fragment
{
	// screen-space texture coordinate of the current pixel
	float2 texCoord: TEXCOORD0;
	// interpolated view ray; scaled by eye space depth in main() to
	// reconstruct the eye space position of the pixel
	float3 view: TEXCOORD1;
};
|
---|
| 17 |
|
---|
| 18 |
|
---|
// Pixel shader output for the SSAO pass.
struct pixel
{
	// packed render target (see main()):
	// x = blended ssao intensity, y = convergence weight,
	// z = current sample index, w = eye space depth
	float4 illum_col: COLOR0;
};
|
---|
| 23 |
|
---|
[3247] | 24 | // this function is inspired from the paper of shamulgaan in order
|
---|
| 25 | // to get a physical expression for the occlusion culling
|
---|
/** Physically motivated occlusion term for a spherical occluder of the
	given radius seen from the given distance (solid angle formula).
	Inspired by the approach of shamulgaan.
*/
inline float occlusionPower(float radius, float dist)
{
	// subtended solid angle: 2 * pi * (1 - cos(asin(r / d)))
	const float twoPi = 6.283185307179586476925286766559f;
	return twoPi * (1.0f - cos(asin(radius / dist)));
}
|
---|
| 30 |
|
---|
| 31 |
|
---|
[3159] | 32 |
|
---|
[2992] | 33 | // reconstruct world space position
|
---|
/** Reconstructs the (eye space) position of a sample from its eye space
	depth by interpolating the view ray between the four frustum corner
	rays (bl, br, tl, tr) at the given texture coordinate.
*/
inline float3 ReconstructSamplePos(float eyeSpaceDepth,
								   float2 texcoord,
								   float3 bl, float3 br, float3 tl, float3 tr)
{
	// view ray through this texel, interpolated from the frustum corners
	const float3 ray = Interpol(texcoord, bl, br, tl, tr);
	// scale the negated ray by the depth to obtain the sample position
	return -ray * eyeSpaceDepth;
}
|
---|
| 43 |
|
---|
| 44 |
|
---|
[3087] | 45 |
|
---|
[3115] | 46 | /** This shader computes the reprojection and stores
|
---|
[3155] | 47 | the ssao value of the old pixel as well as the
|
---|
| 48 | weight of the pixel in the new frame.
|
---|
[3082] | 49 | */
|
---|
inline float3 temporalSmoothing(float4 worldPos,
								float eyeSpaceDepth,
								float2 texcoord0,
								float3 oldEyePos,
								sampler2D oldTex,
								float4x4 oldModelViewProj,
								sampler2D colors,
								float3 projPos,
								float invW,
								float3 oldbl,
								float3 oldbr,
								float3 oldtl,
								float3 oldtr,
								float3 diffVec
								)
{
	// position in the old frame: translational motion of dynamic objects
	// plus the shift of the eye position
	const float3 oldFramePos = diffVec - oldEyePos + worldPos.xyz;


	/////////////////
	//-- reproject into the old frame and derive the texture position
	//-- of the sample in the old frame

	// note: the old model view matrix only holds the view orientation part
	float4 reproj = mul(oldModelViewProj, float4(oldFramePos, 1.0f));
	reproj /= reproj.w;

	// map from the unit cube [-1, 1] into texture space [0, 1]
	const float2 oldTexCoords = reproj.xy * 0.5f + 0.5f;

	// fetch the solution stored for this position in the last frame
	const float4 oldPixel = tex2Dlod(oldTex, float4(oldTexCoords, .0f, .0f));

	// the accumulated ssao value of the old frame
	const float oldSsao = oldPixel.x;
	// eye space depth of the sample as seen in the old frame
	const float oldDepth = oldPixel.w;

	// expected eye space depth of the reprojected position:
	// project the position length onto the old view ray
	const float3 oldViewVec = Interpol(oldTexCoords, oldbl, oldbr, oldtl, oldtr);
	const float invLen = 1.0f / length(oldViewVec);
	const float expectedDepth = invLen * length(oldFramePos);

	// relative difference between stored and expected depth
	const float depthDif = abs(1.0f - oldDepth / expectedDepth);


	//////////////
	//-- reuse the old value only if it was still valid in the old frame:
	//-- the reprojected coordinates must lie inside the view
	//-- and the depths must agree (no disocclusion)

	const bool insideView =
		(oldTexCoords.x > 0) && (oldTexCoords.x < 1.0f) &&
		(oldTexCoords.y > 0) && (oldTexCoords.y < 1.0f);

	// convergence weight and sample index carried over from the old frame;
	// reset to zero when the old pixel is invalid
	float w = .0f;
	float idx = .0f;

	if (insideView && (depthDif <= MIN_DEPTH_DIFF))
	{
		// pixel valid => retrieve the convergence weight and sample index
		w = oldPixel.y;
		idx = oldPixel.z;
	}

	return float3(oldSsao, w, idx);
}
|
---|
| 121 |
|
---|
| 122 |
|
---|
[2881] | 123 | /** The ssao shader returning the an intensity value between 0 and 1
|
---|
[3151] | 124 | This version of the ssao shader uses the dotproduct between pixel and
|
---|
| 125 | sample normal as weight.
|
---|
[2881] | 126 | */
|
---|
float3 ssao2(fragment IN,
			 sampler2D colors,
			 sampler2D noiseTex,
			 float2 samples[NUM_SAMPLES],
			 float3 normal,
			 float3 centerPosition,
			 float scaleFactor,
			 float3 bl,
			 float3 br,
			 float3 tl,
			 float3 tr,
			 float3 viewDir,
			 sampler2D normalTex,
			 float sampleIntensity
			 )
{
	// accumulated occlusion, number of taken samples and validity of the
	// samples in the last frame (returned as x, z and y respectively)
	float total_ao = .0f;
	float numSamples = .0f;
	float validSamples = .0f;


	for (int i = 0; i < NUM_PRECOMPUTED_SAMPLES; ++ i)
	{
		const float2 offset = samples[i];

#if 1
		////////////////////
		//-- add random noise: reflect around random normal vector (rather slow!)

		const float2 mynoise = tex2Dlod(noiseTex, float4(IN.texCoord * 4.0f, 0, 0)).xy;
		const float2 offsetTransformed = myreflect(offset, mynoise);
#else
		const float2 offsetTransformed = offset;
#endif
		// weight with projected coordinate to reach similar kernel size for near and far
		const float2 texcoord = IN.texCoord.xy + offsetTransformed * scaleFactor;

		// w channel holds the eye space depth of the sample
		float4 sampleColor = tex2Dlod(colors, float4(texcoord, 0, 0));

		const float3 samplePos = ReconstructSamplePos(sampleColor.w, texcoord, bl, br, tl, tr);
		// the normal of the current sample
		const float3 sampleNormal = tex2Dlod(normalTex, float4(texcoord, 0, 0)).xyz;


		////////////////
		//-- compute contribution of sample using the direction and angle

		float3 dirSample = samplePos - centerPosition;

		// clamp squared length to avoid a division by (nearly) zero below
		const float sqrLen = max(SqrLen(dirSample), 1e-2f);
		const float lengthToSample = sqrt(sqrLen);

		dirSample /= lengthToSample; // normalize

		// angle between current normal and direction to sample controls AO intensity.
		float cosAngle = .5f + dot(sampleNormal, -normal) * 0.5f;
		// use binary decision to cull samples that are behind current shading point
		cosAngle *= step(0.0f, dot(dirSample, normal));

		// occlusion falls off with the squared distance to the sample
		const float aoContrib = sampleIntensity / sqrLen;

		total_ao += cosAngle * aoContrib;

		// check if the samples have been valid in the last frame
		validSamples += (1.0f - step(1.0f, lengthToSample)) * sampleColor.x;

		++ numSamples;
	}

	// "normalize" the ao contribution over the number of samples
	total_ao /= numSamples;

#if 1
	// if surface normal perpendicular to view dir, approx. half of the samples will not count
	// => compensate for this (on the other hand, projected sampling area could be larger!)
	const float viewCorrection = 1.0f + VIEW_CORRECTION_SCALE * max(dot(viewDir, normal), 0.0f);
	// bugfix: scale the accumulated ao by the correction factor (consistent
	// with ssao()); the old code did "total_ao += cosAngle * aoContrib *
	// viewCorrection", referencing the loop-local cosAngle / aoContrib
	// outside their scope and adding a stale, un-normalized contribution
	total_ao *= viewCorrection;
#endif

	return float3(max(0.0f, 1.0f - total_ao), validSamples, numSamples);
}
|
---|
| 212 |
|
---|
| 213 |
|
---|
[3151] | 214 | /** The ssao shader returning the an intensity value between 0 and 1.
|
---|
| 215 | This version of the ssao shader uses the dotproduct between
|
---|
| 216 | pixel-to-sample direction and sample normal as weight.
|
---|
[3204] | 217 |
|
---|
| 218 | The algorithm works like the following:
|
---|
| 219 | 1) Check in a circular area around the current position.
|
---|
| 220 | 2) Shoot vectors to the positions there, and check the angle to these positions.
|
---|
| 221 | 3) Summing up these angles gives an estimation of the occlusion at the current position.
|
---|
[3150] | 222 | */
|
---|
float3 ssao(fragment IN,
			sampler2D colors,
			sampler2D noiseTex,
			sampler2D samples,
			float3 normal,
			float3 centerPosition,
			float scaleFactor,
			float3 bl,
			float3 br,
			float3 tl,
			float3 tr,
			float3 viewDir,
			float convergence,
			float sampleIntensity,
			bool isMovingObject,
			float idx
			)
{
	// accumulated occlusion, validity of the samples in the last frame and
	// number of taken samples (returned as x, y and z respectively)
	float total_ao = .0f;
	float validSamples = .0f;
	float numSamples = .0f;

	for (int i = 0; i < NUM_SAMPLES; ++ i)
	{
		float2 offset;

		////////////////////
		//-- add random noise: reflect around random normal vector
		//-- (affects performance for some reason!)

		// fetch the i-th precomputed sample offset, starting at the index
		// carried over from the previous frames (idx)
		const float2 ssaoOffset = tex2Dlod(samples, float4((0.5f + i + idx) / NUM_PRECOMPUTED_SAMPLES, 0.5f, .0f, .0f)).xy;

		// while the solution has not converged yet, jitter the offset with
		// per-pixel noise to break up banding artifacts
		if (convergence < SSAO_CONVERGENCE_THRESHOLD)
		{
			float2 mynoise = tex2Dlod(noiseTex, float4(IN.texCoord.x * 4.0f + idx * 0.01f, IN.texCoord.y * 4.0f, 0, 0)).xy;
			//offset = myreflect(samples[i], mynoise);
			//offset = myrotate(samples[i], mynoise.x);

			offset = myrotate(ssaoOffset, mynoise.x);
		}
		else
		{
			// converged: use the raw precomputed offset (stable pattern)
			offset = ssaoOffset;
			//offset = samples[i];
		}

		// weight with projected coordinate to reach similar kernel size for near and far
		const float2 texcoord = IN.texCoord.xy + offset * scaleFactor;

		// w channel holds the eye space depth of the sample
		const float4 sampleColor = tex2Dlod(colors, float4(texcoord, .0f, .0f));
		const float3 samplePos = ReconstructSamplePos(sampleColor.w, texcoord, bl, br, tl, tr);


		////////////////
		//-- compute contribution of sample using the direction and angle

		float3 dirSample = samplePos - centerPosition;

		//const float sqrLen = max(SqrLen(dirSample), 1e-2f);
		//const float lengthToSample = sqrt(sqrLen);
		// clamp length to avoid a division by (nearly) zero below
		const float lengthToSample = max(length(dirSample), 1e-2f);

		dirSample /= lengthToSample; // normalize

		// angle between current normal and direction to sample controls AO intensity.
		float cosAngle = dot(dirSample, normal);

		//const float aoContrib = sampleIntensity / sqrLen;
		// occlusion falls off linearly with the distance to the sample
		const float aoContrib = sampleIntensity / lengthToSample;
		//const float aoContrib = (1.0f > lengthToSample) ? occlusionPower(9e-2f, DISTANCE_SCALE + lengthToSample): .0f;

		// samples behind the shading point (negative cosAngle) contribute nothing
		total_ao += max(cosAngle, 0) * aoContrib;

		++ numSamples;

		// check if the samples have been valid in the last frame
		// only mark sample as invalid if in the last / current frame
		// they possibly have any influence on the ao
		const float changeFactor = sampleColor.y;
		const float pixelValid = sampleColor.x;

		// we check if the sample could have been near enough to the current pixel
		// to have any influence in the current or last frame
		//const float tooFarAway = step(0.5f, lengthToSample - changeFactor);
		//validSamples = max(validSamples, (1.0f - tooFarAway) * pixelValid * step(-0.1f, cosAngle));
		validSamples = max(validSamples, pixelValid);

#ifdef USE_GTX
		// we can bail out early and use a minimal #samples)
		// if some conditions are met as long as the hardware supports it
		if (numSamples >= MIN_SAMPLES)
		{
			// if the pixel belongs to a static object and all the samples stay valid in the current frame
			if (!isMovingObject && (validSamples < 1.0f)) break;
			// if the pixel belongs to a dynamic object but the #accumulated samples
			// for this pixel is sufficiently high (=> there was no discontinuity recently)
			else if (isMovingObject && (convergence > NUM_SAMPLES * 5)) break;
		}
#endif
	}

	// "normalize" ao contribution
	total_ao /= numSamples;

#if 1
	// if surface normal perpendicular to view dir, approx. half of the samples will not count
	// => compensate for this (on the other hand, projected sampling area could be larger!)
	const float viewCorrection = 1.0f + VIEW_CORRECTION_SCALE * max(dot(viewDir, normal), 0.0f);
	total_ao *= viewCorrection;
#endif

	//return float3(total_ao, validSamples, numSamples);
	// clamp the occlusion to 1 (fully occluded)
	return float3(min(1.0f, total_ao), validSamples, numSamples);
}
|
---|
| 337 |
|
---|
[3121] | 338 |
|
---|
[3150] | 339 |
|
---|
[2881] | 340 | /** The mrt shader for screen space ambient occlusion
|
---|
| 341 | */
|
---|
pixel main(fragment IN,
		   uniform sampler2D colors,
		   uniform sampler2D normals,
		   uniform sampler2D noiseTex,
		   uniform sampler2D samples,
		   uniform sampler2D oldTex,
		   uniform float4x4 modelViewProj,
		   uniform float4x4 oldModelViewProj,
		   uniform float temporalCoherence,
		   uniform float3 bl,
		   uniform float3 br,
		   uniform float3 tl,
		   uniform float3 tr,
		   uniform float3 oldEyePos,
		   uniform float3 oldbl,
		   uniform float3 oldbr,
		   uniform float3 oldtl,
		   uniform float3 oldtr,
		   uniform sampler2D attribsTex,
		   uniform float kernelRadius,
		   uniform float sampleIntensity
		   )
{
	pixel OUT;

	//const float3 normal = normalize(tex2Dlod(normals, float4(IN.texCoord, 0 ,0)).xyz);
	const float3 normal = tex2Dlod(normals, float4(IN.texCoord, 0 ,0)).xyz;

	// reconstruct position from the eye space depth
	// (stored in the w channel of the color buffer)
	const float3 viewDir = IN.view;
	const float eyeSpaceDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w;
	const float4 eyeSpacePos = float4(-viewDir * eyeSpaceDepth, 1.0f);

	// per-pixel motion vector of dynamic objects
	// (presumably stored in attribsTex by an earlier pass — TODO confirm)
	float3 diffVec = tex2Dlod(attribsTex, float4(IN.texCoord, 0, 0)).xyz;


	////////////////
	//-- calculate the current projected position (also used for next frame)

	float4 projPos = mul(modelViewProj, eyeSpacePos);
	const float invw = 1.0f / projPos.w;
	projPos *= invw;
	// scale the kernel radius with 1/w => constant screen-space kernel size
	float scaleFactor = kernelRadius * invw;

	// a pixel is treated as belonging to a moving object
	// if its motion vector is long enough
	const float sqrMoveSpeed = SqrLen(diffVec);
	const bool isMovingObject = (sqrMoveSpeed > DYNAMIC_OBJECTS_THRESHOLD);


	/////////////////
	//-- compute temporal reprojection

	// returns (old ssao value, old convergence weight, old sample index)
	float3 temporalVals = temporalSmoothing(eyeSpacePos, eyeSpaceDepth, IN.texCoord, oldEyePos,
											oldTex, oldModelViewProj,
											colors,
											projPos.xyz,
											invw,
											oldbl, oldbr, oldtl, oldtr,
											diffVec
											);

	const float oldSsao = temporalVals.x;
	float oldWeight = temporalVals.y;

	// current position in the precomputed sample pattern (truncated to int)
	float idx = (int)temporalVals.z;

	// wrap around when the whole sample pattern has been used
	if (idx >= NUM_PRECOMPUTED_SAMPLES) idx = 0;

	float3 ao;

	// cull background note: this should be done with the stencil buffer
	if (eyeSpaceDepth < DEPTH_THRESHOLD)
	{
		ao = ssao(IN, colors, noiseTex, samples, normal, eyeSpacePos.xyz,
				  scaleFactor, bl, br, tl, tr, normalize(viewDir), oldWeight,
				  sampleIntensity, isMovingObject, idx);
		//ao = ssao2(IN, colors, noiseTex, samples, normal, eyeSpacePos.xyz, scaleFactor, bl, br, tl, tr, normalize(viewDir), normals, sampleIntensity);
	}
	else
	{
		// background pixel: fully unoccluded
		ao = float3(1.0f, 1.0f, 1.0f);
	}


	///////////
	//-- check if we have to reset pixel because one of the sample points was invalid
	//-- only do this if the current pixel does not belong to a moving object

	// the weight equals the number of sampled shot in this pass
	const float newWeight = ao.z;

	// advance the sample pattern index for the next frame
	idx += newWeight;

	// completely reset the ao in this pixel
	const float completelyResetThres = 20.0f;
	// don't fully reset the ao in this pixel, but give low weight to old solution
	const float partlyResetThres = 1.0f;

	// hack: just update static geometry
	if (!isMovingObject)
	{
		// ao.y carries the invalidity measure returned by ssao()
		if (ao.y > completelyResetThres)
		{
			// discard the old solution entirely and restart accumulation
			oldWeight = .0f;
			idx = .0f;
		}
		else if (ao.y > partlyResetThres)
		{
			// keep the old solution but cap its influence
			oldWeight = min(oldWeight, 4.0f * newWeight);
		}
	}


	//////////
	//-- blend ao between old and new samples (and avoid division by zero)

	OUT.illum_col.x = ao.x * newWeight + oldSsao * oldWeight;
	OUT.illum_col.x /= (newWeight + oldWeight);

	// the new weight for the next frame, capped by the temporal coherence factor
	const float combinedWeight = clamp(newWeight + oldWeight, .0f, temporalCoherence);

	OUT.illum_col.y = combinedWeight;
	// can be used to check if this pixel belongs to a moving object
	//OUT.illum_col.z = SqrLen(diffVec);
	OUT.illum_col.z = idx;
	// store the depth so the next frame can validate its reprojection
	OUT.illum_col.w = eyeSpaceDepth;

	//OUT.illum_col.yzw = diffVec;

	return OUT;
}
|
---|