source: GTP/trunk/App/Demos/Vis/FriendlyCulling/src/shaders/deferred.cg @ 3137

Revision 3137, 8.0 KB, checked in by mattausch, 16 years ago

working ok

#include "../shaderenv.h"


struct fragment
{
	// normalized screen position
	float4 pos: WPOS;
	float2 texCoord: TEXCOORD0;
	float3 view: TEXCOORD1;
};


struct pixel
{
	float4 color: COLOR0;
};


float2 myreflect(float2 pt, float2 n)
{
	// distance to plane
	float d = dot(n, pt);
	// reflect around plane
	float2 rpt = pt - d * 2.0f * n;

	return rpt;
}
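
/** Usage sketch (not part of the original shader): a sample offset can be
	jittered by reflecting it around a random unit vector fetched from a
	noise texture, as done in CalcShadowTerm below. For n = (0, 1) and
	offset = (0.7, 0.3) this yields (0.7, -0.3), i.e., the offset mirrored
	across the plane through the origin with normal n.
*/
#if 0
float2 JitterOffset(float2 offset, float2 texCoord, uniform sampler2D noiseTexture)
{
	// hypothetical helper: fetch a random direction and reflect the offset around it
	const float2 randNormal = normalize(tex2D(noiseTexture, texCoord).xy);
	return myreflect(offset, randNormal);
}
#endif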


/** Function for standard deferred shading.
*/
float4 shade(fragment IN,
             uniform float4 color,
             uniform float3 normal,
             float3 lightDir)
{
	// diffuse intensity
	const float angle = saturate(dot(normal, lightDir));

	float4 lightDiffuse = glstate.light[0].diffuse;
	float4 diffuse = angle * lightDiffuse;

	// light ambient term
	const float4 ambient = glstate.light[0].ambient;

	float4 outColor;

	// hack: prevent shading the sky
	if (color.w > 1e19f) outColor = color;
	else outColor = (ambient + diffuse) * color;

	return outColor;
}



/** The MRT shader for standard rendering.
*/
pixel main(fragment IN,
           uniform sampler2D colors,
           uniform sampler2D normals,
           uniform float3 lightDir
           )
{
	pixel OUT;

	float4 norm = tex2D(normals, IN.texCoord);
	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));

	float3 normal = normalize(norm.xyz);
	float4 col = shade(IN, color, normal, lightDir);

	OUT.color = col;
	// pass the eye space depth through; scaling by the view vector length
	// (so we don't have to normalize for, e.g., ssao) is done in ScaleDepth
	OUT.color.w = color.w;// / length(IN.view);

	return OUT;
}


float CalcShadowTerm(fragment IN,
                     uniform sampler2D shadowMap,
                     uniform float scale,
                     uniform float2 lightSpacePos,
                     uniform float depth,
                     uniform float2 samples[NUM_PCF_TABS],
                     uniform float weights[NUM_PCF_TABS],
                     uniform sampler2D noiseTexture
                     )
{
	//float shadowDepth = tex2D(shadowMap, lightSpacePos).x;
	//return step(depth, shadowDepth);

	float total_d = .0f;
	float total_w = .0f;

	for (int i = 0; i < NUM_PCF_TABS; ++ i)
	{
		const float2 offset = samples[i];
		const float w = weights[i];

#if 1
		////////////////////
		//-- add random noise: reflect around random normal vector (warning: slow!)

		float2 mynoise = tex2D(noiseTexture, IN.texCoord).xy;
		const float2 offsetTransformed = myreflect(offset, mynoise);
#else
		const float2 offsetTransformed = offset;
#endif
		// weight with projected coordinate to reach similar kernel size for near and far
		float2 texcoord = lightSpacePos + offsetTransformed * scale;

		float shadowDepth = tex2D(shadowMap, texcoord).x;

		total_d += w * step(depth, shadowDepth);
		total_w += w;
	}

	total_d /= (float)total_w;

	return total_d;
}
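
/** Minimal unfiltered variant for comparison (illustrative sketch): a single
	depth compare without the jittered PCF kernel, corresponding to the two
	commented-out lines at the top of CalcShadowTerm.
*/
#if 0
float CalcShadowTermSimple(uniform sampler2D shadowMap, float2 lightSpacePos, float depth)
{
	const float shadowDepth = tex2D(shadowMap, lightSpacePos).x;
	// 1 if the stored occluder depth is at least the fragment depth (lit), 0 otherwise
	return step(depth, shadowDepth);
}
#endif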


/** Bilinearly interpolate the four corner values using the weights w.
*/
inline float3 Interpol(float2 w, float3 bl, float3 br, float3 tl, float3 tr)
{
	float3 x1 = lerp(bl, tl, w.y);
	float3 x2 = lerp(br, tr, w.y);
	float3 v  = lerp(x1, x2, w.x);

	return v;
}
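
/** Usage sketch (assumption; Interpol is not called in this file): the four
	corner directions bl, br, tl, tr passed to main_shadow can be blended
	with the screen space texture coordinate to reconstruct a per-pixel view
	ray when the vertex stage does not supply IN.view.
*/
#if 0
float3 ReconstructViewRay(float2 texCoord, float3 bl, float3 br, float3 tl, float3 tr)
{
	// hypothetical helper: bilinear blend of the corner rays
	return Interpol(texCoord, bl, br, tl, tr);
}
#endif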


pixel main_shadow(fragment IN,
                  uniform sampler2D colors,
                  uniform sampler2D positions,
                  uniform sampler2D normals,
                  uniform sampler2D shadowMap,
                  uniform float4x4 shadowMatrix,
                  uniform float sampleWidth,
                  uniform sampler2D noiseTex,
                  uniform float2 samples[NUM_PCF_TABS],
                  uniform float weights[NUM_PCF_TABS],
                  uniform float3 lightDir,
                  uniform float3 eyePos,
                  uniform float3 bl,
                  uniform float3 br,
                  uniform float3 tl,
                  uniform float3 tr
                  )
{
	pixel OUT;

	float4 norm = tex2D(normals, IN.texCoord.xy);
	const float3 normal = normalize(norm.xyz);

	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));

	/// reconstruct the position from the eye space depth
	float3 viewDir = IN.view;
	const float lenView = length(viewDir);
	viewDir /= lenView;

	const float eyeDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w;

	const float4 worldPos = float4(eyePos - viewDir * eyeDepth, 1);

	// diffuse intensity
	const float angle = saturate(dot(normal, lightDir));
	const float4 lightDiffuse = glstate.light[0].diffuse;

	float4 diffuse = lightDiffuse * angle;

	// hack: prevent shadowing the sky
	const bool useShading = (color.w < 1e19f);

	// compute diffuse illumination + shadow term
	if (useShading &&
		(angle > 1e-3f) // shadow only if the diffuse color has some minimum intensity
		)
	{
		float4 lightSpacePos = mul(shadowMatrix, worldPos);
		lightSpacePos /= lightSpacePos.w;

		float shadowTerm = CalcShadowTerm(IN, shadowMap, sampleWidth, lightSpacePos.xy, lightSpacePos.z, samples, weights, noiseTex);

		diffuse *= shadowTerm;
	}

	// light ambient term
	const float4 ambient = glstate.light[0].ambient;
	// compute shading
	OUT.color = useShading ? (ambient + diffuse) * color : color;
	// store the depth scaled by the view vector length from now on so we don't have to normalize later (e.g., for ssao)
	//OUT.color.w = color.w / lenView;

	return OUT;
}

#if 0
/** This shader computes the reprojection and stores reprojected color / depth values
	as well as a boolean that
*/
pixel Reproject(fragment IN,
                uniform sampler2D colors,
                uniform sampler2D normals)
{
	float4 norm = tex2Dlod(normals, float4(IN.texCoord, 0, 0));
	const float3 normal = normalize(norm.xyz);

	/// reconstruct the position from the eye space depth
	float3 viewDir = IN.view;
	const float eyeDepth = tex2Dlod(colors, float4(IN.texCoord, 0, 0)).w;
	const float3 eyeSpacePos = -viewDir * eyeDepth;
	const float4 worldPos = float4(eyePos + eyeSpacePos, 1.0f);


	////////////////
	//-- calculate the current projected position (also used for the next frame)

	float4 currentPos = mul(modelViewProj, worldPos);

	const float w = SAMPLE_RADIUS / currentPos.w;
	currentPos /= currentPos.w;

	const float precisionScale = 1e-3f;
	const float currentDepth = currentPos.z * precisionScale;

	const float2 ao = ssao(IN, colors, noiseTex, samples, normal,
	                       eyeSpacePos, w, bl, br, tl, tr, normalize(viewDir));


	/////////////////
	//-- temporal smoothing


	// reproject the new frame into the old one

	// calculate the projected depth
	float4 projPos = mul(oldModelViewProj, worldPos);
	projPos /= projPos.w;

	// the current depth projected into the old frame
	const float projDepth = projPos.z * precisionScale;
	// fit from the unit cube into 0 .. 1
	const float2 tex = projPos.xy * 0.5f + 0.5f;
	// retrieve the sample from the last frame
	float4 oldCol = tex2D(oldTex, tex);

	const float oldDepth = oldCol.z;
	//const float depthDif = 1.0f - projDepth / oldDepth;
	const float depthDif = projDepth - oldDepth;

	//const float oldNumSamples = oldCol.y;
	const float oldWeight = clamp(oldCol.y, 0, temporalCoherence);

	float newWeight;

	// the number of valid samples in this frame
	//const float newNumSamples = ao.y;

	if ((temporalCoherence > 0)
		&& (tex.x >= 0.0f) && (tex.x < 1.0f)
		&& (tex.y >= 0.0f) && (tex.y < 1.0f)
		&& (abs(depthDif) < MIN_DEPTH_DIFF)
		&& (abs(oldCol.x - ao.x) < 0.1f)
		// if visibility changed in the surrounding area we have to recompute
		//&& (oldNumSamples > 0.8f * newNumSamples)
		)
	{
		// increase the weight for convergence
		newWeight = oldWeight + 1.0f;
		OUT.illum_col.x = (ao.x + oldCol.x * oldWeight) / newWeight;
		//if (!(oldNumSamples > ao.y - 1.5f)) newWeight = 0;
	}
	else
	{
		OUT.illum_col.x = ao.x;
		newWeight = .0f;
	}

	OUT.illum_col.y = newWeight;
	OUT.illum_col.z = currentDepth;

	return OUT;
}

#endif


float4 Output(fragment IN, uniform sampler2D colors): COLOR
{
	return tex2Dlod(colors, float4(IN.texCoord, 0, 0));
}


float4 ScaleDepth(fragment IN,
                  uniform sampler2D colors): COLOR
{
	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));
	// scale the depth by the view vector length so we don't have to normalize for, e.g., ssao
	color.w /= length(IN.view);

	return color;
}
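
/** Consumer sketch (assumption): with the depth scaled by the view vector
	length as above, a later pass can reconstruct the eye space position
	directly from the unnormalized, interpolated view vector. The sampler
	name scaledColors is hypothetical.
*/
#if 0
float3 ReconstructEyeSpacePos(fragment IN, uniform sampler2D scaledColors)
{
	const float scaledDepth = tex2Dlod(scaledColors, float4(IN.texCoord, 0, 0)).w;
	// equivalent to -normalize(IN.view) * eyeDepth, but without the normalize
	return -IN.view * scaledDepth;
}
#endif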


float4 DownSample(fragment IN,
                  uniform sampler2D colors,
                  uniform float2 downSampleOffs[NUM_DOWNSAMPLES]): COLOR
{
	// let bilinear filtering do its work
	float4 color = tex2Dlod(colors, float4(IN.texCoord, 0, 0));
	return color;
}
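
/** Manual variant (illustrative sketch, assuming downSampleOffs holds
	NUM_DOWNSAMPLES texel offsets around the pixel): average the offset taps
	explicitly instead of relying on bilinear filtering.
*/
#if 0
float4 DownSampleManual(fragment IN,
                        uniform sampler2D colors,
                        uniform float2 downSampleOffs[NUM_DOWNSAMPLES]): COLOR
{
	float4 color = float4(0, 0, 0, 0);

	// accumulate the offset taps and average them
	for (int i = 0; i < NUM_DOWNSAMPLES; ++ i)
		color += tex2Dlod(colors, float4(IN.texCoord + downSampleOffs[i], 0, 0));

	return color / (float)NUM_DOWNSAMPLES;
}
#endif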