//------------------------------------
float4x4 g_mWorldView;            // World-view matrix
float4x4 g_mView;                 // View matrix
float4x4 g_mProj;                 // Projection matrix
float4x4 g_mWorldViewProjection;  // World-view-projection matrix
float4x4 g_mViewProjection;       // View-projection matrix

float  ParticleSize;              // Particle's size in world units
float3 ParticlePos;               // Position of the particle
float  ReciprocalParticleSize;    // 1 / particle size
float  NearPlaneDistance;         // Distance of the near plane
float  FarPlaneMinusNearPlane;    // Distance of the far plane minus distance of the near plane

bool animatedSprite;              // True if the texture is an animated sprite sheet
bool useDepthImposter;            // True if soft-particle depth fading is enabled

float invNbColumns;               // 1 / number of sprite-sheet columns
float invNbRows;                  // 1 / number of sprite-sheet rows
int   nbColumns;
int   nbRows;
int   nbFrame;                    // Current animation frame index

// Current per-particle values
float4 argb;                      // Particle color that modulates the texture
float2 hWidthHeight;              // Half width / half height of the billboard

texture g_txCurrentTexture;
texture g_txDepthBuffer;
texture preDistortion;
texture distortionMap1;
texture distortionMap2;
texture screenDistortionMap;

//------------------------------------
struct vertexInput
{
    float3 position : POSITION;
    float4 texCoord : TEXCOORD0;
};

struct vertexOutput
{
    float4 HPosition         : POSITION;
    float4 ColorTextureCoord : TEXCOORD0;
    float2 DepthBufferCoord  : TEXCOORD1;
    float  ParticleDepth     : TEXCOORD2;
};

struct cpVertexOutput
{
    float4 HPosition         : POSITION;
    float4 ColorTextureCoord : TEXCOORD0;
};

struct pixelOutput
{
    float4 color : COLOR;
};

struct depthVertexInput
{
    float4 position : POSITION;
    float4 texCoord : TEXCOORD0;
};

struct depthVertexOutput
{
    float4 HPosition : POSITION;
    float4 texCoord  : TEXCOORD0;
};

//------------------------------------
#define SAMPLER_LINEAR(g_samplerMap, g_txMap)  \
sampler2D g_samplerMap = sampler_state {       \
    Texture   = <g_txMap>;                     \
    MinFilter = Linear;                        \
    MagFilter = Linear;                        \
    MipFilter = Linear;                        \
    AddressU  = BORDER;                        \
    AddressV  = BORDER;                        \
};

#define SAMPLER_POINT(g_samplerMap, g_txMap)   \
sampler2D g_samplerMap = sampler_state {       \
    Texture   = <g_txMap>;                     \
    MinFilter = Point;                         \
    MagFilter = Point;                         \
    MipFilter = Point;                         \
    AddressU  = BORDER;                        \
    AddressV  = BORDER;                        \
};

#define SAMPLER_LINEAR_WRAP(g_samplerMap, g_txMap) \
sampler2D g_samplerMap = sampler_state {           \
    Texture   = <g_txMap>;                         \
    MinFilter = Linear;                            \
    MagFilter = Linear;                            \
    MipFilter = Linear;                            \
    AddressU  = WRAP;                              \
    AddressV  = WRAP;                              \
};

SAMPLER_LINEAR(g_samplerCurrentTexture, g_txCurrentTexture);
SAMPLER_POINT(g_samplerDepthBuffer, g_txDepthBuffer);
SAMPLER_POINT(samplerPreDistortion, preDistortion);
SAMPLER_LINEAR_WRAP(samplerDistMap1, distortionMap1);
SAMPLER_LINEAR_WRAP(samplerDistMap2, distortionMap2);
SAMPLER_POINT(samplerScreenDistMap, screenDistortionMap);

//----------------------------------------------
depthVertexOutput DepthPassVS(depthVertexInput IN)
{
    depthVertexOutput OUT;

    float4 vertexpos = float4(IN.position.xyz, 1.0);
    float4 eyespace  = mul(vertexpos, g_mWorldViewProjection);
    OUT.HPosition = eyespace;

    // Store the normalized depth in the alpha channel of the texture coordinate.
    float tempDepth = saturate(eyespace.z / FarPlaneMinusNearPlane);
    IN.texCoord.a = tempDepth;
    OUT.texCoord  = IN.texCoord;

    return OUT;
}

pixelOutput DepthPassPS(depthVertexOutput IN)
{
    pixelOutput OUT;

    float depth = IN.texCoord.a;
    OUT.color = float4(depth, depth, depth, depth);

    return OUT;
}

//------------------------------------
vertexOutput DepthImposterVS(vertexInput IN)
{
    vertexOutput OUT;

    // Billboarding: extract the camera's right, up and front vectors from the view matrix.
    float3 rightVec;
    rightVec.x = g_mView[0][0];
    rightVec.y = g_mView[1][0];
    rightVec.z = g_mView[2][0];

    float3 upVec;
    upVec.x = g_mView[0][1];
    upVec.y = g_mView[1][1];
    upVec.z = g_mView[2][1];

    float3 frontVec;
    frontVec.x = g_mView[0][2];
    frontVec.y = g_mView[1][2];
    frontVec.z = g_mView[2][2];

    float3 pos = ParticlePos;
    pos += (hWidthHeight.x * IN.position.x * rightVec +
            hWidthHeight.y * IN.position.y * upVec);
    pos += ParticleSize * frontVec;

    float4 finalpos = mul(float4(pos, 1), g_mViewProjection);
    OUT.HPosition = finalpos;

    if (useDepthImposter)
    {
        // Divide by zfar - znear to normalize the particle depth.
        OUT.ParticleDepth = finalpos.z / FarPlaneMinusNearPlane;

        // Apply w-division to get normalized device coordinates [-1,1].
        float2 devicecoords = finalpos.xy / finalpos.w;

        // Now transform them into texture coordinates [0,1].
        devicecoords = devicecoords * 0.5 + 0.5;
        devicecoords.y = 1.0 - devicecoords.y;
        OUT.DepthBufferCoord = devicecoords;
    }

    // Texture coordinate depends on the current pattern index and pattern size.
    if (!animatedSprite)
    {
        OUT.ColorTextureCoord = IN.texCoord;
    }
    else
    {
        // Select the current frame inside the sprite sheet.
        int column    = nbFrame % nbColumns;
        int framesSub = nbFrame - column;
        int row       = framesSub / nbColumns;
        OUT.ColorTextureCoord.x = invNbColumns * (column + IN.texCoord.x);
        OUT.ColorTextureCoord.y = invNbRows * (row + IN.texCoord.y);
    }

    return OUT;
}

//------------------------------------
pixelOutput DepthImposterPS(vertexOutput IN)
{
    pixelOutput OUT;

    // Do a lookup in the depth buffer.
    float difference = 1.0f;
    if (useDepthImposter)
    {
        float4 lookup = float4(IN.DepthBufferCoord.xy, 0.0, 1.0);
        float depth = tex2D(g_samplerDepthBuffer, lookup).a;

        // Calculate the visible range of the particle (fade where the scene depth is close).
        difference = abs(depth - IN.ParticleDepth);
        difference *= ReciprocalParticleSize;
        difference = min(difference, 1.0);
    }

    // Multiply the texel by the depth-fade factor and the input color.
    float4 texturecolor = tex2D(g_samplerCurrentTexture, IN.ColorTextureCoord);
    texturecolor.a *= difference;
    texturecolor *= argb;
    OUT.color = texturecolor;

    return OUT;
}

//------------------------------------
cpVertexOutput HeatHazeCopyVS(vertexInput IN)
{
    cpVertexOutput OUT;

    OUT.HPosition = float4(IN.position, 1);
    OUT.ColorTextureCoord = IN.texCoord;

    return OUT;
}

//------------------------------------
pixelOutput HeatHazeCopyPS(cpVertexOutput IN)
{
    pixelOutput OUT;

    // Fetch the accumulated screen-space distortion, remap it from [0,1] to [-1,1]
    // and scale it down to a small offset.
    float2 deviceCoord = IN.ColorTextureCoord.xy;
    float2 offset = tex2D(samplerScreenDistMap, deviceCoord).xy;
    offset = ((offset * 2) - 1) * 0.005;

    // Sample the undistorted scene with the offset applied.
    OUT.color = float4(tex2D(samplerPreDistortion, deviceCoord + offset).xyz, 1);

    return OUT;
}

//------------------------------------
vertexOutput HeatHazeVS(vertexInput IN)
{
    vertexOutput OUT;

    // Billboarding: extract the camera's right, up and front vectors from the view matrix.
    float3 rightVec;
    rightVec.x = g_mView[0][0];
    rightVec.y = g_mView[1][0];
    rightVec.z = g_mView[2][0];

    float3 upVec;
    upVec.x = g_mView[0][1];
    upVec.y = g_mView[1][1];
    upVec.z = g_mView[2][1];

    float3 frontVec;
    frontVec.x = g_mView[0][2];
    frontVec.y = g_mView[1][2];
    frontVec.z = g_mView[2][2];

    float3 pos = ParticlePos;
    pos += (hWidthHeight.x * IN.position.x * rightVec +
            hWidthHeight.y * IN.position.y * upVec);
    pos += ParticleSize * frontVec;
    //pos += IN.position;

    // Calculate particle's center position in eye space.
    /*float4 vertexpos = float4(IN.position.xyz, 1.0);
    float4 eyespace = mul(vertexpos, g_mWorldView);
    eyespace.z += ParticleSize;
    // Multiply with the projection matrix to get the final vertex position.
    float4 finalpos = mul(eyespace, g_mProj);*/

    float4 finalpos = mul(float4(pos, 1), g_mViewProjection);
    OUT.HPosition = finalpos;

    // Divide by zfar - znear to normalize the particle depth.
    OUT.ParticleDepth = finalpos.z / FarPlaneMinusNearPlane;

    // Apply w-division to get normalized device coordinates [-1,1].
    float2 devicecoords = finalpos.xy / finalpos.w;

    // Now transform them into texture coordinates [0,1].
    devicecoords = devicecoords * 0.5 + 0.5;
    devicecoords.y = 1.0 - devicecoords.y;
    OUT.DepthBufferCoord = devicecoords;

    // Texture coordinate depends on the current pattern index and pattern size.
    OUT.ColorTextureCoord = IN.texCoord;

    return OUT;
}

//------------------------------------
pixelOutput HeatHazePS(vertexOutput IN)
{
    pixelOutput OUT;

    float2 deviceCoord = IN.DepthBufferCoord.xy;
    float depth = tex2D(g_samplerDepthBuffer, deviceCoord).a;

    if (depth > IN.ParticleDepth)
    {
        // Fade the distortion where the particle is close to the scene geometry.
        float difference = abs(depth - IN.ParticleDepth);
        difference *= ReciprocalParticleSize;
        difference = min(difference, 1.0);

        float2 offset = tex2D(samplerDistMap1, IN.ColorTextureCoord.xy).xy;
        float fade = abs(offset.x - 0.5) * 2 * difference;
        OUT.color = float4((offset - 0.5) * difference, 0.5, fade);
    }
    else
    {
        // Particle is occluded: write a neutral (zero) distortion.
        OUT.color = float4(0.5, 0.5, 0.5, 0);
    }

    return OUT;
}

//
// Effect
//
#define Technique(name)                               \
technique name                                        \
{                                                     \
    pass p0                                           \
    {                                                 \
        VertexShader = compile vs_3_0 name##VS();     \
        PixelShader  = compile ps_3_0 name##PS();     \
    }                                                 \
}

Technique( DepthImposter );
Technique( DepthPass );
Technique( HeatHazeCopy );
Technique( HeatHaze );
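The techniques above target the legacy Direct3D 9 / D3DX effect framework (vs_3_0 / ps_3_0 profiles, technique/pass blocks). Purely as a hedged illustration of how the globals and the DepthImposter technique declared in this file might be driven from the host application, here is a minimal C++ sketch using the ID3DXEffect interface. Only the parameter and technique names come from the file itself; the function name, the half-size convention used for hWidthHeight, the resources passed in, and the omitted quad draw call are assumptions about the surrounding application.

// Hypothetical host-side usage sketch (C++ / D3DX9); not part of the effect file.
#include <d3dx9.h>

void DrawSoftParticle(ID3DXEffect* fx,                 // compiled from this .fx file
                      const D3DXMATRIX& view,
                      const D3DXMATRIX& viewProj,
                      const D3DXVECTOR3& particlePos,
                      float particleSize,
                      float nearPlane,
                      float farPlane,
                      IDirect3DTexture9* particleTex,
                      IDirect3DTexture9* sceneDepthTex) // filled by the DepthPass technique
{
    // Bind the globals read by DepthImposterVS / DepthImposterPS.
    fx->SetMatrix("g_mView", &view);
    fx->SetMatrix("g_mViewProjection", &viewProj);
    fx->SetValue("ParticlePos", &particlePos, sizeof(particlePos));
    fx->SetFloat("ParticleSize", particleSize);
    fx->SetFloat("ReciprocalParticleSize", 1.0f / particleSize);
    fx->SetFloat("NearPlaneDistance", nearPlane);
    fx->SetFloat("FarPlaneMinusNearPlane", farPlane - nearPlane);
    fx->SetBool("useDepthImposter", TRUE);
    fx->SetBool("animatedSprite", FALSE);

    // Assumption: the billboard half-extents equal half the particle size.
    D3DXVECTOR2 halfSize(0.5f * particleSize, 0.5f * particleSize);
    fx->SetValue("hWidthHeight", &halfSize, sizeof(halfSize));

    D3DXVECTOR4 color(1.0f, 1.0f, 1.0f, 1.0f);          // argb modulation color
    fx->SetVector("argb", &color);

    fx->SetTexture("g_txCurrentTexture", particleTex);
    fx->SetTexture("g_txDepthBuffer", sceneDepthTex);

    // Render the billboard quad with the single-pass DepthImposter technique.
    fx->SetTechnique("DepthImposter");
    UINT passes = 0;
    fx->Begin(&passes, 0);
    fx->BeginPass(0);
    // ... issue the particle quad draw call here (e.g. DrawPrimitive on a quad VB) ...
    fx->EndPass();
    fx->End();
}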