3D Series 4.14 Specular Reflections – Bump Mapping the Water Plane
You can find a detailed explanation of specular highlights in Tutorial 6.9 Adding HLSL Specular Highlights, and of bump mapping a flat plane in Tutorial 5.15 Adding Water to the 3D World. This chapter therefore only touches on the theory briefly and focuses on the implementation.
Our moving water already looks quite good, but one easy addition is still missing: the specular highlight on the water. In a small region the water reflects the sunlight (or another light source) very strongly, as shown in the image below:
The question is: how do we find the pixels inside this region? It is straightforward: mirror the light direction (the left arrow in the image) around the normal, and compare the result with the eye vector (the right arrow). If the two are almost identical, the pixel lies inside the specular region. How do we code this in HLSL? Very easily, using the reflect intrinsic, since we have already defined a normalVector and computed the eyeVector in our pixel shader.
The following line computes the light direction mirrored around the normal vector:
float3 reflectionVector = -reflect(xLightDirection, normalVector);
Next, we need to know how close this reflection vector is to the eyeVector. We get this from the dot product of the two: a dot product of 1 means the vectors are identical, while 0 means they are perpendicular:
float specular = dot(normalize(reflectionVector), normalize(eyeVector));
We only care about vectors that are nearly identical, say with a dot product above 0.95. By raising this value to a very high power, only values extremely close to 1 survive, while everything else is squashed towards zero (see Tutorial 6.4 Adding Specular Highlights to Reflective Surfaces and Tutorial 6.9 Adding HLSL Specular Highlights for the reasoning):
specular = pow(specular, 256);
Output.Color.rgb += specular;
The last line adds this specular contribution to the final color. If you run the code now, you get something like the screenshot below:
It looks nice, but rather like a flat plate. The reason is that every pixel of the water still uses (0,1,0), pointing straight up, as its normal, while in reality the normal is perturbed by the waves. In chapter 3D Series 4.11 Waves we saw that the bump map stores the actual normal for every pixel, so we can now replace the fixed (0,1,0) vector with the vector stored in the bump map. Find the line in the pixel shader that defines normalVector and replace it with the following:
float3 normalVector = (bumpColor.rbg-0.5f)*2.0f;
As already explained in chapter 3D Series 4.11 Waves, the color components lie in the [0,1] range; to map them to the [-1,1] range we subtract 0.5 and multiply by 2 (so, for example, a component of 0.5 maps to 0 and a component of 1.0 maps to 1).
When you run the code, you should see a result like the image below.
Changing the normalVector affects more than the specular highlight: the Fresnel term is improved as well, since it is also computed from this per-pixel normal.
Once the code has been running for a while, you will easily notice the repeating bump-map tiles in the highlight area. This can be solved by scrolling the bump map several times, each time with a different scale, a different scrolling speed, and with the X and Y texture coordinates swapped, and blending the samples; this approach was also used in Tutorial 5.15 Adding Water to the 3D World. A sketch of the idea follows below.
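The fragment below is only a minimal sketch of that idea, not the code used in this chapter: it would replace the single tex2D call at the top of WaterPS, taking a second sample with swapped coordinates and a different tiling and averaging the two. The .yx swizzle, the scale factor 1.7f and the equal blend weights are assumptions made up for the illustration.

// Hypothetical variation on the first line of WaterPS: sample the bump map a
// second time with swapped coordinates and a different tiling, then average the
// two samples. The .yx swizzle and the factor 1.7f are arbitrary assumptions;
// a different scrolling speed would additionally need a second moveVector in WaterVS.
float4 bumpColorA = tex2D(WaterBumpMapSampler, PSIn.BumpMapSamplingPos);
float4 bumpColorB = tex2D(WaterBumpMapSampler, PSIn.BumpMapSamplingPos.yx*1.7f);
float4 bumpColor = (bumpColorA + bumpColorB)/2.0f;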
The XNA code does not change, so only the HLSL code is listed. The parts that changed relative to the previous chapter are the normalVector definition and the specular block in WaterPS; they are marked with comments in the listing below:
//----------------------------------------------------
//-- --
//-- www.riemers.net --
//-- Series 4: Advanced terrain --
//-- Shader code --
//-- --
//----------------------------------------------------
//------- Constants --------
float4x4 xView;
float4x4 xReflectionView;
float4x4 xProjection;
float4x4 xWorld;
float3 xLightDirection;
float xAmbient;
bool xEnableLighting;
float xWaveLength;
float xWaveHeight;
float3 xCamPos;
float xTime;
float xWindForce;
float3 xWindDirection;
//------- Texture Samplers --------
Texture xTexture;
sampler TextureSampler = sampler_state { texture = <xTexture> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = mirror; AddressV = mirror;};

Texture xTexture0;
sampler TextureSampler0 = sampler_state { texture = <xTexture0> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = wrap; AddressV = wrap;};

Texture xTexture1;
sampler TextureSampler1 = sampler_state { texture = <xTexture1> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = wrap; AddressV = wrap;};

Texture xTexture2;
sampler TextureSampler2 = sampler_state { texture = <xTexture2> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = mirror; AddressV = mirror;};

Texture xTexture3;
sampler TextureSampler3 = sampler_state { texture = <xTexture3> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = mirror; AddressV = mirror;};

Texture xReflectionMap;
sampler ReflectionSampler = sampler_state { texture = <xReflectionMap> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = mirror; AddressV = mirror;};

Texture xRefractionMap;
sampler RefractionSampler = sampler_state { texture = <xRefractionMap> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = mirror; AddressV = mirror;};

Texture xWaterBumpMap;
sampler WaterBumpMapSampler = sampler_state { texture = <xWaterBumpMap> ; magfilter = LINEAR; minfilter = LINEAR; mipfilter=LINEAR; AddressU = mirror; AddressV = mirror;};
//------- Technique: Textured --------
struct TVertexToPixel
{
float4 Position : POSITION;
float4 Color : COLOR0;
float LightingFactor: TEXCOORD0;
float2 TextureCoords: TEXCOORD1;
};
struct TPixelToFrame
{
float4 Color : COLOR0;
};
TVertexToPixel TexturedVS( float4 inPos : POSITION, float3 inNormal: NORMAL, float2 inTexCoords: TEXCOORD0)
{
TVertexToPixel Output = (TVertexToPixel)0;
float4x4 preViewProjection = mul (xView, xProjection);
float4x4 preWorldViewProjection = mul (xWorld, preViewProjection);
Output.Position = mul(inPos, preWorldViewProjection);
Output.TextureCoords = inTexCoords;
float3 Normal = normalize(mul(normalize(inNormal), xWorld));
Output.LightingFactor = 1;
if (xEnableLighting)
Output.LightingFactor = saturate(dot(Normal, -xLightDirection));
return Output;
}
TPixelToFrame TexturedPS(TVertexToPixel PSIn)
{
TPixelToFrame Output = (TPixelToFrame)0;
Output.Color = tex2D(TextureSampler, PSIn.TextureCoords);
Output.Color.rgb *= saturate(PSIn.LightingFactor + xAmbient);
return Output;
}
technique Textured_2_0
{
pass Pass0
{
VertexShader = compile vs_2_0 TexturedVS();
PixelShader = compile ps_2_0 TexturedPS();
}
}
technique Textured
{
pass Pass0
{
VertexShader = compile vs_1_1 TexturedVS();
PixelShader = compile ps_1_1 TexturedPS();
}
}
//------- Technique: Multitextured --------
struct MTVertexToPixel
{
float4 Position : POSITION;
float4 Color : COLOR0;
float3 Normal : TEXCOORD0;
float2 TextureCoords : TEXCOORD1;
float4 LightDirection : TEXCOORD2;
float4 TextureWeights : TEXCOORD3;
float Depth : TEXCOORD4;
};
struct MTPixelToFrame
{
float4 Color : COLOR0;
};
MTVertexToPixel MultiTexturedVS( float4 inPos : POSITION, float3 inNormal: NORMAL, float2 inTexCoords: TEXCOORD0, float4 inTexWeights: TEXCOORD1)
{
MTVertexToPixel Output = (MTVertexToPixel)0;
float4x4 preViewProjection = mul (xView, xProjection);
float4x4 preWorldViewProjection = mul (xWorld, preViewProjection);
Output.Position = mul(inPos, preWorldViewProjection);
Output.Normal = mul(normalize(inNormal), xWorld);
Output.TextureCoords = inTexCoords;
Output.LightDirection.xyz = -xLightDirection;
Output.LightDirection.w = 1;
Output.TextureWeights = inTexWeights;
Output.Depth = Output.Position.z/Output.Position.w;
return Output;
}
MTPixelToFrame MultiTexturedPS(MTVertexToPixel PSIn)
{
MTPixelToFrame Output = (MTPixelToFrame)0;
float lightingFactor = 1;
if (xEnableLighting)
lightingFactor = saturate(saturate(dot(PSIn.Normal, PSIn.LightDirection)) + xAmbient);
float blendDistance = 0.99f;
float blendWidth = 0.005f;
float blendFactor = clamp((PSIn.Depth-blendDistance)/blendWidth, 0, 1);
float4 farColor;
farColor = tex2D(TextureSampler0, PSIn.TextureCoords)*PSIn.TextureWeights.x;
farColor += tex2D(TextureSampler1, PSIn.TextureCoords)*PSIn.TextureWeights.y;
farColor += tex2D(TextureSampler2, PSIn.TextureCoords)*PSIn.TextureWeights.z;
farColor += tex2D(TextureSampler3, PSIn.TextureCoords)*PSIn.TextureWeights.w;
float4 nearColor;
float2 nearTextureCoords = PSIn.TextureCoords*3;
nearColor = tex2D(TextureSampler0, nearTextureCoords)*PSIn.TextureWeights.x;
nearColor += tex2D(TextureSampler1, nearTextureCoords)*PSIn.TextureWeights.y;
nearColor += tex2D(TextureSampler2, nearTextureCoords)*PSIn.TextureWeights.z;
nearColor += tex2D(TextureSampler3, nearTextureCoords)*PSIn.TextureWeights.w;
Output.Color = lerp(nearColor, farColor, blendFactor);
Output.Color *= lightingFactor;
return Output;
}
technique MultiTextured
{
pass Pass0
{
VertexShader = compile vs_1_1 MultiTexturedVS();
PixelShader = compile ps_2_0 MultiTexturedPS();
}
}
//------- Technique: Water --------
struct WVertexToPixel
{
float4 Position : POSITION;
float4 ReflectionMapSamplingPos : TEXCOORD1;
float2 BumpMapSamplingPos : TEXCOORD2;
float4 RefractionMapSamplingPos : TEXCOORD3;
float4 Position3D : TEXCOORD4;
};
struct WPixelToFrame
{
float4 Color : COLOR0;
};
WVertexToPixel WaterVS(float4 inPos : POSITION, float2 inTex: TEXCOORD)
{
WVertexToPixel Output = (WVertexToPixel)0;
float4x4 preViewProjection = mul (xView, xProjection);
float4x4 preWorldViewProjection = mul (xWorld, preViewProjection);
float4x4 preReflectionViewProjection = mul (xReflectionView, xProjection);
float4x4 preWorldReflectionViewProjection = mul (xWorld, preReflectionViewProjection);
Output.Position = mul(inPos, preWorldViewProjection);
Output.ReflectionMapSamplingPos = mul(inPos, preWorldReflectionViewProjection);
Output.RefractionMapSamplingPos = mul(inPos, preWorldViewProjection);
Output.Position3D = mul(inPos, xWorld);
float3 windDir = normalize(xWindDirection);
float3 perpDir = cross(xWindDirection, float3(0,1,0));
float ydot = dot(inTex, xWindDirection.xz);
float xdot = dot(inTex, perpDir.xz);
float2 moveVector = float2(xdot, ydot);
moveVector.y += xTime*xWindForce;
Output.BumpMapSamplingPos = moveVector/xWaveLength;
return Output;
}
WPixelToFrame WaterPS(WVertexToPixel PSIn)
{
WPixelToFrame Output = (WPixelToFrame)0;
float4 bumpColor = tex2D(WaterBumpMapSampler, PSIn.BumpMapSamplingPos);
float2 perturbation = xWaveHeight*(bumpColor.rg - 0.5f)*2.0f;
float2 ProjectedTexCoords;
ProjectedTexCoords.x = PSIn.ReflectionMapSamplingPos.x/PSIn.ReflectionMapSamplingPos.w/2.0f + 0.5f;
ProjectedTexCoords.y = -PSIn.ReflectionMapSamplingPos.y/PSIn.ReflectionMapSamplingPos.w/2.0f + 0.5f;
float2 perturbatedTexCoords = ProjectedTexCoords + perturbation;
float4 reflectiveColor = tex2D(ReflectionSampler, perturbatedTexCoords);
float2 ProjectedRefrTexCoords;
ProjectedRefrTexCoords.x = PSIn.RefractionMapSamplingPos.x/PSIn.RefractionMapSamplingPos.w/2.0f + 0.5f;
ProjectedRefrTexCoords.y = -PSIn.RefractionMapSamplingPos.y/PSIn.RefractionMapSamplingPos.w/2.0f + 0.5f;
float2 perturbatedRefrTexCoords = ProjectedRefrTexCoords + perturbation;
float4 refractiveColor = tex2D(RefractionSampler, perturbatedRefrTexCoords);
float3 eyeVector = normalize(xCamPos - PSIn.Position3D);
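// changed from the previous chapter: take the per-pixel normal from the bump map instead of the fixed (0,1,0)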
float3 normalVector = (bumpColor.rbg-0.5f)*2.0f;
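// the Fresnel term now benefits from the per-pixel normal as well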
float fresnelTerm = dot(eyeVector, normalVector);
float4 combinedColor = lerp(reflectiveColor, refractiveColor, fresnelTerm);
float4 dullColor = float4(0.3f, 0.3f, 0.5f, 1.0f);
Output.Color = lerp(combinedColor, dullColor, 0.2f);
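// new in this chapter: specular highlight (light direction mirrored around the normal, compared with the eye vector)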
float3 reflectionVector = -reflect(xLightDirection, normalVector);
float specular = dot(normalize(reflectionVector), normalize(eyeVector));
specular = pow(specular, 256);
Output.Color.rgb += specular;
return Output;
}
technique Water
{
pass Pass0
{
VertexShader = compile vs_1_1 WaterVS();
PixelShader = compile ps_2_0 WaterPS();
}
}
Posted: 2009/12/22 2:30:58 PM