I was fooling around with the depth texture when I came across something unexpected that I'm sure has a simple explanation, but it has me baffled. What I'm doing is sampling the depth texture at each fragment and using that value (a 0…1 non-linear depth value, 0 being the far plane, 1 the near plane) as the pixel's color, optionally multiplied by a factor to make it a bit brighter (see shader below).
Put on a quad, I can see the objects behind the quad in greyscale, as expected — until, that is, I switch the scene's one directional light off. Then suddenly the entire quad turns white (indicating all pixels are on the near plane), even when there are point lights distributed in the scene.
So, my simple question is: why? What am I missing?
With directional light:
Shader:
// Visualizes the camera depth texture: samples _CameraDepthTexture at each
// fragment and outputs the raw (non-linear, 0..1) depth value as greyscale,
// optionally scaled by _Magnify to brighten distant (small) depth values.
//
// IMPORTANT: _CameraDepthTexture is only valid if the camera actually renders
// a depth pass. In the forward rendering path Unity generates it on demand —
// typically as a side effect of shadow mapping for a shadow-casting
// directional light. With that light off, the texture is never filled
// (reads return the clear value, ~1.0, so the quad looks white).
// Fix: request the depth texture explicitly from a script:
//     camera.depthTextureMode |= DepthTextureMode.Depth;
Shader "Unlit/zbuffer2color"
{
    Properties
    {
        // Brightness multiplier applied to the raw depth sample.
        _Magnify ("Magnify", Range(1, 100)) = 50
    }
    SubShader
    {
        // Transparent queue so opaque geometry (and the depth texture) is
        // rendered before this quad samples it.
        Tags { "Queue"="Transparent" "RenderType"="Transparent" }
        Pass
        {
            Cull Off
            ZWrite Off
            Blend SrcAlpha OneMinusSrcAlpha
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
            };

            struct vert2frag
            {
                float4 vertex : SV_POSITION;
                // Homogeneous screen position; divided by w at sample time
                // via UNITY_PROJ_COORD / the _PROJ sampling macro.
                float4 screenPosRaw : TEXCOORD0;
            };

            // sampler2D_float: the depth texture is high precision; a plain
            // sampler2D may be sampled at half precision on mobile targets.
            sampler2D_float _CameraDepthTexture;
            float _Magnify;

            vert2frag vert (appdata v)
            {
                vert2frag o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.screenPosRaw = ComputeScreenPos(o.vertex);
                return o;
            }

            fixed4 frag (vert2frag i) : SV_Target
            {
                // Perspective-correct screen UV for the depth texture lookup.
                float4 screenPos = UNITY_PROJ_COORD(i.screenPosRaw);
                // Raw hardware depth (reversed-Z on most platforms:
                // 1 = near plane, 0 = far plane), brightened by _Magnify.
                float depth = SAMPLE_DEPTH_TEXTURE_PROJ(_CameraDepthTexture, screenPos) * _Magnify;
                return float4(depth, depth, depth, 1);
            }
            ENDCG
        }
    }
}
