I’ve been trying to extract world-space normals in a shader from the DepthTextureMode.DepthNormals pre-pass. However, my results don’t seem to be correct: after extracting the normal and converting it to RGB for display, my floor plane, which should have normal = (0, 1, 0), shows an RGB of (188, 255, 188) rather than the (128, 255, 128) I’d expect.
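For reference, the remap I apply below is n * 0.5 + 0.5 before writing the colour out, so a floor normal of (0, 1, 0) should come out as (0.5, 1.0, 0.5), i.e. (128, 255, 128) in bytes. Working backwards from what I actually see, 188 / 255 * 2 - 1 is about 0.47, so the value on screen corresponds to a normal of roughly (0.47, 1.0, 0.47), which isn't even unit length.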
I’ve stripped it back to this shader but can’t explain the discrepancy:
Shader "Custom/DecodeDepthUnlit"
{
Properties
{
[MaterialToggle]
_ShowNormals ("Show Normals", Int) = 0
}
SubShader
{
Tags
{
"RenderType"="Transparent"
"Queue" = "Transparent-1"
}
ZWrite Off
ZTest Off
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
            sampler2D _CameraDepthNormalsTexture;
            int _ShowNormals; // 1 = show normals, 0 = show depth

            struct v2f
            {
                float4 pos : SV_POSITION;
                float2 uv : TEXCOORD0;
                float4 scrPos : TEXCOORD1;
            };

            v2f vert (appdata_base v)
            {
                v2f o;
                o.pos = UnityObjectToClipPos(v.vertex);
                o.scrPos = ComputeScreenPos(o.pos);
                o.uv = v.texcoord;
                return o;
            }

            half4 frag (v2f i) : COLOR
            {
                float3 viewSpaceNormal;
                float viewDepth;
                float2 screenPosition = (i.scrPos.xy / i.scrPos.w);
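                // Unpack the 0-1 linear depth (stored in zw) and the view-space normal (encoded in xy)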
                DecodeDepthNormal(tex2D(_CameraDepthNormalsTexture, screenPosition), viewDepth, viewSpaceNormal);
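                // Rotate the view-space normal into world space with the inverse view (camera-to-world) rotation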
                float3 worldNormal = mul((float3x3)unity_MatrixInvV, float4(viewSpaceNormal, 0.0));
                if (_ShowNormals == 1)
                {
                    // remap from [-1..+1] to [0..1]
                    float3 col = (worldNormal * 0.5) + 0.5;
                    return float4(col, 1.0);
                }
                else
                {
                    // viewDepth is already in range [0..1]
                    return float4(viewDepth, viewDepth, viewDepth, 1.0);
                }
            }
            ENDCG
        }
    }
    FallBack "Diffuse"
}
I suspect I’ve been staring at this so long that I’m overlooking something stupid. Does anyone have any pointers?
Thanks.