Hi!
I’m working on getting a shader effect to work on OpenGL platforms, and I’m stumped. The shader is for a fadeout plane for Infinite Depth Pits Of Death. It fades out based on the difference in distance from the camera to the plane and the camera to the depth texture. This gives a nice, fog-like effect.
It’s not giving good results on OpenGL, though - I’m using OpenGLES3. Here’s screenshots showing the effect, and the difference between the platforms:
Screens
Here’s the shader code:
Shader code
Shader "Custom/HeightFogShader" {
Properties {
_Color ("Main Color", Color) = (1,1,1,1)
_MainTex ("Base (RGB) Trans (A)", 2D) = "white" {}
_DistanceMultiplier("Distance multiplier", Float) = 1
}
SubShader {
Tags {"Queue"="Transparent+2" "IgnoreProjector"="True" "RenderType"="Transparent"}
Pass{
Blend SrcAlpha OneMinusSrcAlpha
ZWrite Off
Cull Off
CGPROGRAM
#pragma fragment frag
#pragma vertex vert
#include "UnityCG.cginc"
fixed4 _Color;
uniform sampler2D _CameraDepthTexture; //Depth Texture
uniform sampler2D _MainTex;
uniform float _DistanceMultiplier;
float4 _MainTex_ST;
struct v2f{
float2 uv : TEXCOORD0;
float4 pos : SV_POSITION;
float4 projPos : TEXCOORD1; //Screen position of pos
};
v2f vert(appdata_base v){
v2f o;
o.pos = mul(UNITY_MATRIX_MVP, v.vertex);
o.projPos = ComputeScreenPos(o.pos);
o.uv = TRANSFORM_TEX (v.texcoord, _MainTex);
return o;
}
half4 frag (v2f i) : SV_Target {
float sceneZ = LinearEyeDepth(tex2Dproj(_CameraDepthTexture, i.projPos));
float projZ = i.projPos.z;
half4 c = tex2D(_MainTex, i.uv);
c.r *= _Color.r;
c.g *= _Color.g;
c.b *= _Color.b;
float distVal = (sceneZ - projZ) * _DistanceMultiplier * 1.5;
c.a *= _Color.a * distVal * c.a - 0.6f;
return c;
}
ENDCG
}
}
Fallback "Transparent/VertexLit"
}
From doing some debugging, it seems like
o.pos = mul(UNITY_MATRIX_MVP, v.vertex);
o.projPos = ComputeScreenPos(o.pos);
gives different values on the platforms. So if I create a debug color based on i.projPos.z, that changes. On the other hand,
tex2Dproj(_CameraDepthTexture, i.projPos)
gives the same result on the platforms. So somehow sampling the depth texture takes the above difference into account. This causes the discrepancy between the platforms, but I can’t figure out how to counteract that discrepancy.
Can anyone explain what’s going on and how I should go about getting the same result in OpenGL?
