The world-space distance between the camera and a fragment shouldn’t change at all as the camera rotates, but I have been running into this problem where it does change. I’ve made a simple example to demonstrate.
Below is a view of a landscape with hills and a mountain. The landscape is rendered with a post-process effect (shader code is below) that visualizes the reconstructed world-space distance from the camera’s depth texture, snapped to discrete intervals to make the problem clearer:
Here is where the problem comes in: when I rotate my camera and objects move to the side of the view, their world-space distance shrinks by hundreds of meters (watch the mountain and large hill as they move to the right):
Why are these depth values changing so dramatically? My depth visualization shader is below.
Shader "Visualize Depth PostProcess"
{
Properties
{
_Range("Range", Float) = 2575
}
SubShader
{
Tags { "RenderType"="Opaque" }
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
};
struct v2f
{
float4 vertex : SV_POSITION;
float4 screenPos : TEXCOORD0;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.screenPos = ComputeScreenPos(o.vertex);
return o;
}
float _Range;
sampler2D_float _CameraDepthTexture;
fixed4 frag (v2f IN) : SV_Target
{
float4 depthMapUV4 = UNITY_PROJ_COORD(IN.screenPos);
float rawDepth = tex2Dproj(_CameraDepthTexture, depthMapUV4).r;
float camToFragWorldDist = LinearEyeDepth(rawDepth);
float3 color = saturate(camToFragWorldDist / _Range).xxx;
//To make the effect more stark, snap the depth value to discrete steps.
return float4((0.2 * step(0.1, color)) +
(0.2 * step(0.3, color)) +
(0.2 * step(0.5, color)) +
(0.2 * step(0.7, color)) +
(0.2 * step(0.9, color)),
1);
}
ENDCG
}
}
}