Shader Bug Below Camera's Y

So I have been following this tutorial: https://www.udemy.com/course/unity-shaders/
Today I have been trying to apply its raymarched spherical fog to a custom procedural mesh I built previously, but I seem to be running into some problems with my implementation.

The first bug seems to be in the ordering or layering of the mesh with the raymarching shader. For all the pixels below the camera's level, it appears to be drawing the far, non-culled side of the mesh. Camera angle does not seem to affect this particular behavior.



The second bug occurs when the angle between the object and the camera is high but still within viewing range. In the attached pictures, the orange/salmon color is supposed to be well below the water line, but at that angle (the view angle is 60 degrees, so only between 20 and 30 degrees), and seemingly along the normals that point towards the camera, the water disappears.

I would not be surprised if these two bugs are linked. The color coming from the texture sampler is correct: I have a surface shader that uses the UVs stored in the mesh to sample the ocean texture, and I have also tried a simpler shader that just makes the ocean layer transparent through the alpha channel, but that was not the effect I was going for. I have also downloaded and copied in Sebastian Lague's ocean shader from the GitHub repo linked here

and it produces the same effect. So with all of that, I believe it is probably a pretty simple error in my code that I have missed. I have written the shader twice now over the last 5 hours, so I feel it's probably time to get some more eyes on the problem. Let me know what y'all think.
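For reference, the simple surface shader I used to confirm that the mesh UVs and the texture sampling are fine looks roughly like this. I'm rewriting it from memory, so treat it as a sketch rather than the exact file, and the shader name is just something I made up here:

Shader "IE/OceanUVCheck"
{
    Properties
    {
        _OceanTex ("Texture", 2D) = "white" {}
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }

        CGPROGRAM
        #pragma surface surf Lambert

        sampler2D _OceanTex;

        struct Input
        {
            float2 uv_OceanTex; // UVs stored in the procedural mesh
        };

        void surf (Input IN, inout SurfaceOutput o)
        {
            // Just sample the ocean texture with the mesh UVs to prove they are valid.
            o.Albedo = tex2D(_OceanTex, IN.uv_OceanTex).rgb;
        }
        ENDCG
    }
    Fallback "Diffuse"
}

And here is the raymarching ocean shader itself: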

Shader "IE/OceanShader"
{
    Properties
    {
        _OceanTex ("Texture", 2D) = "white" {}
        _DefaultColor("Default Color", Color) = (0.5, 0, 1, 1)
        _SeaRadius("Sea Radius", float) = 0.5
        _FogCenter("Fog Center/Radius", Vector) = (0, 0, 0, 0.5)
        _InnerRatio("Inner Ratio", Range(0.0, 1.0)) = 0.5
        _Density("Density", Range(0.0, 1.0)) = 0.5
        _StepCount("Step Count", Range(0, 100)) = 5
    }
    SubShader
    {
        Tags { "Queue"="Transparent" }
        Blend SrcAlpha OneMinusSrcAlpha
        Cull Off Lighting Off ZWrite Off
        ZTest Always

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            sampler2D _OceanTex;
            float4 _OceanTex_ST;
            fixed4 _DefaultColor;
            float _SeaRadius;
            float4 _FogCenter;
            float _InnerRatio;
            float _Density;
            int _StepCount;
            sampler2D _CameraDepthTexture;

            float CalculateFogIntensity(
                float3 sphereCenter,
                float sphereRadius,
                float innerRatio,
                float density,
                float3 cameraPosition,
                float3 viewDirection,
                float maxDistance
                )
            {
                // Calculate Ray Sphere.
                float3 localCam = cameraPosition - sphereCenter;
                float a = dot(viewDirection, viewDirection);
                float b = 2 * dot(viewDirection, localCam);
                float c = dot(localCam, localCam) - sphereRadius * sphereRadius;
                float d = b * b - 4 * a * c;
                if(d <= 0.0)
                    return 0;

                float dSqrt = sqrt(d);
                float nearDist = max((-b - dSqrt) / (2 * a), 0);
                float farDist = max((-b + dSqrt) / (2 * a), 0);
                   
                float backDepth = min(maxDistance, farDist);
                float sample = nearDist;
                float stepDistance = (backDepth - nearDist) / _StepCount;
                float stepContribution = density;
                float centerValue = 1 / (1 - innerRatio);

                // March through the sphere, attenuating clarity by the fog sampled at each step.
                float clarity = 1;
                for (int seg = 0; seg < _StepCount; seg++)
                {
                    float3 locPos = localCam + viewDirection * sample;
                    float val = saturate(centerValue * (1 - length(locPos)/sphereRadius));
                    float fogAmount = saturate(val * stepContribution);
                    clarity *= (1 - fogAmount);
                    sample += stepDistance;
                }
                return 1 - clarity;
            }

            struct v2f
            {
                float3 viewDir : TEXCOORD0;
                float4 pos : SV_POSITION;
                float4 projPos : TEXCOORD1;
                float2 locTex : TEXCOORD2;
            };



            v2f vert (appdata_base v)
            {
                v2f o;
                float4 wPos = mul(unity_ObjectToWorld, v.vertex);
                o.pos = UnityObjectToClipPos(v.vertex);
                o.viewDir = wPos.xyz - _WorldSpaceCameraPos;
                o.projPos = ComputeScreenPos(o.pos);
                o.locTex = v.texcoord;
                // Zero out clip-space z for vertices that end up behind the camera
                // (intended to keep the volume visible when the camera is inside it).
                float inFrontOf = (o.pos.z/_SeaRadius) > 0;
                o.pos.z *= inFrontOf;

                return o;
            }

            fixed4 frag (v2f i) : SV_Target
            {
                half4 color = half4(0.5, 0, 1, 1);
                // Linear eye depth of the opaque scene at this pixel; used as the far limit of the raymarch.
                float depth = LinearEyeDepth(UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, UNITY_PROJ_COORD(i.projPos))));
                float3 viewDir = normalize(i.viewDir);
                float fog = CalculateFogIntensity(_FogCenter.xyz, _SeaRadius, _InnerRatio, _Density, _WorldSpaceCameraPos, viewDir, depth);
                color.rgb = tex2D(_OceanTex, i.locTex);
                //color.rgb = _DefaultColor;
                color.a = fog;
                return color;
            }
            ENDCG
        }
    }
}

So I've learned some things between the OP and now. I can get most of the effect that I want with ZTest LEqual and backface culling turned on, but the examples I'm trying to replicate allow the camera to be inside the water object with ZTest Always and Cull Off, so I want to understand how to get my shader to that point. The Lague example uses a mask, so that's what I've been studying lately. As far as I can tell, that example does not use the depth texture, and I believe the light-dark bands at camera level on the edges of my sphere are probably related to the non-linear depth texture issues I've read about elsewhere. Ideally my ocean shader would also render correctly from beneath the water's surface, but that's a lower-priority goal than having a semi-transparent ocean.
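For the depth texture part specifically, the thing I keep circling back to is that LinearEyeDepth gives the scene's distance along the camera's forward axis, while my raymarch measures distance along the view ray, so passing one in as the other would be off towards the edges of the screen. Here is the correction I'm considering for the fragment shader; it's an untested sketch built on the variables from my listing above (sceneEyeDepth, camForward and maxDistance are names I made up):

                // Scene depth from the depth texture is measured along the camera's
                // forward axis, not along this fragment's view ray.
                float sceneEyeDepth = LinearEyeDepth(UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, UNITY_PROJ_COORD(i.projPos))));

                float3 viewDir = normalize(i.viewDir);    // i.viewDir = worldPos - cameraPos from the vertex shader

                // Camera forward in world space: the negated third row of the world-to-view matrix.
                float3 camForward = -UNITY_MATRIX_V[2].xyz;

                // Convert "depth along the forward axis" into "distance along this ray"
                // before using it as the far limit of the raymarch.
                float maxDistance = sceneEyeDepth / max(dot(viewDir, camForward), 0.0001);

                float fog = CalculateFogIntensity(_FogCenter.xyz, _SeaRadius, _InnerRatio,
                                                  _Density, _WorldSpaceCameraPos, viewDir, maxDistance);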

I have searched and searched and cannot find anything similar to the problem I am experiencing. I have downloaded free water assets and I keep getting the same problem. It would be easier to search if I had any idea what words to use to describe it. I've gone through suggestions about alpha clipping, cutouts, transparent material problems, camera-height rendering bugs, the depth texture, triplanar mapping, and many more.

The best way I can describe the problem is that below the plane of the camera, the water's material renders in front of everything, regardless of world position or where I think it should land in the depth buffer. If the camera is at a lower y level than the entire sphere and is looking up at it, the problem is not noticeable. If the camera is at a higher y level than the entire sphere, the whole sphere is overlapped by the far side's ocean texture. It's as if it's sampling the color and alpha of both the front and the back side for every pixel below the camera's y level. I don't think it has anything to do with the raymarching; it seems to be entirely about how I'm handling the transparency.

I keep seeing suggestions about breaking a transparent shader up into a geometry pass and something else, but I have no idea whether that will even solve my problem, since I simply don't know what is causing the visual bug. I also seem to remember reading that intersecting meshes are a problem when one of them is transparent, but it looks fine above the camera's y, so I'm not convinced that's the whole story.
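One idea I want to try next, based on other volume shaders I've looked at: with Cull Off, every covered pixel gets both a front face and a back face of the sphere blended on top of each other, so switching the main pass to render only back faces would give exactly one water layer per pixel and would still draw when the camera is inside the sphere. Only the render state lines of the pass would change; this is an untested sketch:

            Blend SrcAlpha OneMinusSrcAlpha
            // Back faces only: one water layer per pixel instead of a front and a
            // back layer blended together, and the sphere still rasterizes when
            // the camera is inside it.
            Cull Front
            Lighting Off
            ZWrite Off
            ZTest Always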

I feel like I'm so close to solving this, but I have been hung up on it all week and I'm tempted to just give up, use an opaque shader, and move on to other tasks. It feels like a fundamental misunderstanding on my part about Unity and shaders, probably the depth texture, but because I don't know what words to search for, I have to hope I stumble on the right combination that leads me to the presumably countless existing answers to this problem.

Here is what happens when I use this free asset, which is pretty much exactly what I want from my shader (https://assetstore.unity.com/packages/vfx/shaders/stylized-water-for-urp-162025?aid=1011l3n8v&utm_source=aff). Notice the color band at the plane of the camera, the extra-dark ring along the periphery where it seems to be combining the front and back colors of the ocean, and the ripping at the camera plane at the edges of the sphere (upwards on the left side and downwards on the right). The caustics projection also seems to interact with the camera-level plane, though it appears to stay below the projection through the xz plane, so I have no idea whether this is connected to the problem too:

Literally any idea would help at this point.

I added a depth-write prepass that I have seen recommended elsewhere, and this is how things look with the shader I wrote. It feels a little better, since it's (mostly) no longer layering the far back side over the near front side, but it is now completely hiding the underlying terrain, so I'm not convinced it has actually helped.

Here’s the code:

Shader "IE/OceanShader"
{
    Properties
    {
        _OceanTex ("Texture", 2D) = "white" {}
        _DefaultColor("Default Color", Color) = (0.5, 0, 1, 1)
        _SeaRadius("Sea Radius", float) = 0.5
        _FogCenter("Fog Center/Radius", Vector) = (0, 0, 0, 0.5)
        _InnerRatio("Inner Ratio", Range(0.0, 1.0)) = 0.5
        _Density("Density", Range(0.0, 1.0)) = 0.5
        _StepCount("Step Count", Range(0, 100)) = 5
    }
    SubShader
    {


        Pass
        {
            ZWrite On
            // disable rendering to color channels
            ColorMask 0
        }


        Pass
        {
            Tags
            {
                "Queue" = "Transparent"
                "RenderType" = "Transparent"
            }
            Blend SrcAlpha OneMinusSrcAlpha
            Cull Off Lighting Off ZWrite Off
            ZTest Always
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"
            sampler2D _OceanTex;
            float4 _OceanTex_ST;
            fixed4 _DefaultColor;
            float _SeaRadius;
            float4 _FogCenter;
            float _InnerRatio;
            float _Density;
            int _StepCount;
            sampler2D _CameraDepthTexture;
            float CalculateFogIntensity(
                float3 sphereCenter,
                float sphereRadius,
                float innerRatio,
                float density,
                float3 cameraPosition,
                float3 viewDirection,
                float maxDistance
                )
            {
                // Calculate Ray Sphere.
                float3 localCam = cameraPosition - sphereCenter;
                float a = dot(viewDirection, viewDirection);
                float b = 2 * dot(viewDirection, localCam);
                float c = dot(localCam, localCam) - sphereRadius * sphereRadius;
                float d = b * b - 4 * a * c;
                if(d <= 0.0)
                    return 0;
                float dSqrt = sqrt(d);
                float nearDist = max((-b - dSqrt) / (2 * a), 0);
                float farDist = max((-b + dSqrt) / (2 * a), _SeaRadius);
               
                float backDepth = min(maxDistance, farDist);
                float sample = nearDist;
                float stepDistance = (backDepth - nearDist) / _StepCount;
                float stepContribution = density;
                float centerValue = 1 / (1 - innerRatio);
                float clarity = 1;
                for (int seg = 0; seg < _StepCount; seg++)
                {
                    float3 locPos = localCam + viewDirection * sample;
                    float val = saturate(centerValue * (1 - length(locPos)/sphereRadius));
                    float fogAmount = saturate(val * stepContribution);
                    clarity *= (1 - fogAmount);
                    sample += stepDistance;
                }
                return 1 - clarity;
            }
            struct v2f
            {
                float3 viewDir : TEXCOORD0;
                float4 pos : SV_POSITION;
                float4 projPos : TEXCOORD1;
                float2 locTex : TEXCOORD2;
                float3 vertPos : TEXCOORD3;
            };
            v2f vert (appdata_base v)
            {
                v2f o;
                o.vertPos = v.vertex;
                float4 wPos = mul(unity_ObjectToWorld, v.vertex);
                o.pos = UnityObjectToClipPos(v.vertex);
                o.viewDir = wPos.xyz - _WorldSpaceCameraPos;
                o.projPos = ComputeScreenPos(o.pos);
                o.locTex = v.texcoord;
                float inFrontOf = (o.pos.z/o.pos.w) > 0;
                o.pos.z *= inFrontOf;
                return o;
            }
            fixed4 frag (v2f i) : SV_Target
            {
                half4 color = half4(0.5, 0, 1, 1);
                float depth = LinearEyeDepth(UNITY_SAMPLE_DEPTH(tex2Dproj(_CameraDepthTexture, UNITY_PROJ_COORD(i.projPos))));
                float3 viewDir = normalize(i.viewDir);
                float fog = CalculateFogIntensity(i.vertPos, _SeaRadius, _InnerRatio, _Density, _WorldSpaceCameraPos, viewDir, depth);
                color.rgb = tex2D(_OceanTex, i.locTex);
                //color.rgb = float3(depth, 0.5, 0.5);
                color.a = fog;
                return color;
            }
            ENDCG
        }
    }
    Fallback "Diffuse"

}
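One thing I noticed while re-reading the ShaderLab docs after pasting this in: Queue and RenderType are SubShader-level tags, so putting them inside the Pass like I did above is probably ignored, leaving both passes in the default Geometry queue. If that's right, the depth prepass writes its depth among the opaques instead of after them, which could be exactly why the terrain behind the water stops rendering. The layout I think I need to test next looks something like this (untested sketch; the main pass would reuse the CGPROGRAM block from the listing above):

    SubShader
    {
        // Queue/RenderType are recognized here on the SubShader, not inside a Pass.
        Tags { "Queue"="Transparent" "RenderType"="Transparent" }

        // Depth-only prepass. With the whole SubShader in the Transparent queue,
        // this runs after the opaque terrain has already drawn its color and depth.
        Pass
        {
            ZWrite On
            ColorMask 0
        }

        // Main pass: same CGPROGRAM as in the listing above, but with ZTest LEqual
        // so it respects the depth the prepass just wrote.
        Pass
        {
            Blend SrcAlpha OneMinusSrcAlpha
            Cull Off
            Lighting Off
            ZWrite Off
            ZTest LEqual

            // CGPROGRAM ... ENDCG block from the listing above
        }
    }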

And here are the dead ends I have followed since my last post.

I'm partially convinced it might have something to do with my custom mesh, since all the working transparent shaders I have seen just use the default primitives, and I literally cannot find any other examples of this problem. There are only two meshes in that scene, one for the ocean and one for the land. Is having them intertwined the reason it works above the camera but not below? I suspect at least one of the people in those old threads had the same problem, but the posts are ancient and the pictures are all gone. Is it some problem that has nothing to do with shaders, which would explain why I can't get any help here or on the Discord for that Udemy course? 150 eyes have seen this glitch and no one has any suggestions? Surely someone who knows a thing or two about shaders has looked at this post? If it's already been answered many times before, please just reply with a link.

Here is the shader on some primitives