Getting depth in HDRP compute shaders

Hey guys I had a quick question. I’m rendering raymarched objects in HDRP, however I cannot seem to get the camera depth to line up just quite right. I’m using a compute shader in a custom post processing effect to render these objects. If anyone knows why the depth is not lining up right feel free to suggest something!

Here’s the code for the custom post process:

using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;
using System;

[Serializable, VolumeComponentMenu("Rendering/Raymarched Objects")]
public sealed class RenderRaymarchedObjects : CustomPostProcessVolumeComponent, IPostProcessComponent
{
    [Tooltip("Whether raymarched objects are enabled or not")]
    public BoolParameter enabled = new BoolParameter(false);
    public MinIntParameter maxIterations = new MinIntParameter(128, 8);

    private Material overlayMat;
    private ComputeShader raymarchShader;
    private int raymarchKernel;
    private Transform sun;

    public bool IsActive() => enabled.value;

    // Do not forget to add this post process in the Custom Post Process Orders list (Project Settings > Graphics > HDRP Global Settings).
    public override CustomPostProcessInjectionPoint injectionPoint => CustomPostProcessInjectionPoint.AfterPostProcess;

    private const string kShaderName = "Hidden/Shader/OverlayTextureOnScreen";

    /// <summary>
    /// Runs on shader start
    /// </summary>
    public override void Setup()
    {
        raymarchShader = Resources.Load<ComputeShader>("RaymarchObjects");
        raymarchKernel = raymarchShader.FindKernel("RenderRaymarch");

        Light[] sceneLights = FindObjectsByType<Light>(FindObjectsSortMode.None);
        for (int i = 0; i < sceneLights.Length; i++)
        {
            if (sceneLights[i].type == LightType.Directional)
            {
                sun = sceneLights[i].transform;
                break;
            }
        }

        if (Shader.Find(kShaderName) != null)
            overlayMat = new Material(Shader.Find(kShaderName));
        else
            Debug.LogError($"Unable to find shader '{kShaderName}'. Post Process Volume RenderRaymarchedObjects is unable to load. " +
                $"To fix this, please edit the 'kShaderName' constant in RenderRaymarchedObjects.cs or change the name of your custom post process shader.");
    }

    public override void Render(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandle destination)
    {
        if (overlayMat == null)
            return;

        // Get output texture for raymarching

        RenderTexture output = Rendering.GetTempComputeTexture(source);

        // Set camera vars

        Camera cam = camera.camera;
        raymarchShader.SetMatrix("camToWorld", cam.cameraToWorldMatrix);
        raymarchShader.SetMatrix("camInverseProjection", cam.projectionMatrix.inverse);
        raymarchShader.SetFloat("camFarPlane", cam.farClipPlane);
        raymarchShader.SetFloat("camNearPlane", cam.nearClipPlane);
        raymarchShader.SetVector("size", new Vector2(output.width, output.height));

        // Dispatch raymarching
        
        raymarchShader.SetVector("sunDir", -sun.forward);
        raymarchShader.SetFloat("ambientLight", 0.1f);
        raymarchShader.SetInt("maxIterations", maxIterations.value);
        raymarchShader.SetTexture(raymarchKernel, "result", output);
        raymarchShader.SetTextureFromGlobal(raymarchKernel, "DepthTexture", "_CameraDepthTexture");
        raymarchShader.Dispatch(raymarchKernel, output.width / 8, output.height / 8, 1);

        // Overlay raymarched texture onto scene

        overlayMat.SetFloat("intensity", 1f);
        overlayMat.SetTexture("mainTex", source);
        overlayMat.SetTexture("overlayTex", output);
        HDUtils.DrawFullScreen(cmd, overlayMat, destination, shaderPassId: 0);

        // Release raymarched output texture

        RenderTexture.ReleaseTemporary(output);
    }

    /// <summary>
    /// Memory management
    /// </summary>
    public override void Cleanup()
    {
        CoreUtils.Destroy(overlayMat);
    }
}

And here’s the code for the raymarching:

// Referenced: https://www.youtube.com/watch?v=BNZtUB7yhX4
#pragma kernel RenderRaymarch

#include "UnityCG.cginc"

// Camera vars

float2 size;
float4x4 camToWorld, camInverseProjection;
float camFarPlane, camNearPlane;
Texture2DArray<float> _CameraDepthTexture; // HDRP Depth Texture Array
StructuredBuffer<int2> _DepthPyramidMipLevelOffsets;

// Camera functions

// Gets the view ray direction from a screen coordinate
inline float3 getCamRayDir(float2 coord)
{
    return normalize(mul(camToWorld, float4(mul(camInverseProjection, float4(coord, 0.0f, 1.0f)).xyz, 0.0f)).xyz);
}

float getCamDepth(int2 coord)
{
    int2 mipCoord = coord.xy >> int(0);
    int2 mipOffset = _DepthPyramidMipLevelOffsets[int(0)];
    return Linear01Depth(_CameraDepthTexture.Load(int4(mipOffset + mipCoord, 0, 0)));
}

// SDFs

float sdfSphere(float3 pt, float rad)
{
    return length(pt) - rad;
}

float sdfRoundBox(float3 pt, float3 box, float r)
{
    float3 q = abs(pt) - box + r;
    return length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.0) - r;
}

// Raymarching vars

int maxIterations;
float3 sunDir;
float ambientLight;

// Raymarching fucntions

float sampleSceneSDF(float3 pt)
{
    return min(sdfSphere(pt, 1.0f), sdfRoundBox(pt + float3(0, 2, 0), float3(2, 1, 2), 0.1f));
}

float3 calcNormal(float3 pt)
{
    const float EPS = 0.0001f;
    const float2 h = float2(EPS, 0);
    return normalize(float3(sampleSceneSDF(pt + h.xyy) - sampleSceneSDF(pt - h.xyy),
                           sampleSceneSDF(pt + h.yxy) - sampleSceneSDF(pt - h.yxy),
                           sampleSceneSDF(pt + h.yyx) - sampleSceneSDF(pt - h.yyx)));
}

float normalLighting(float3 normal)
{
    return clamp(dot(sunDir, normal), 0, 1);
}

// Buffers

RWTexture2D<float4> result;

// Raymarches the scene
[numthreads(8, 8, 1)]
void RenderRaymarch(uint3 id : SV_DispatchThreadID)
{
    // Get ray
    
    float2 percentOnTex = id.xy / size;
    float3 curRayOrigin = mul(camToWorld, float4(0.0f, 0.0f, 0.0f, 1.0f)).xyz,
        curRayDir = getCamRayDir(percentOnTex * 2 - 1);
    
    // Perform raymarching
    
    float depth = getCamDepth(id.xy);
    float distOnRay = 0;
    
    for (int i = 0; i < maxIterations; i++)
    {
        float3 curRayPt = curRayOrigin + (distOnRay * curRayDir);
        
        float sphereMarch = sampleSceneSDF(curRayPt);
        if (sphereMarch - 0.00001f < 0)
        {
            float3 normal = calcNormal(curRayPt);
            float lightAmt = normalLighting(normal) + ambientLight;
            
            result[id.xy] = float4(float3(depth, 0, 0) * lightAmt, 1);
            break;
        }
        
        if (distOnRay / camFarPlane > depth)
            break;
        distOnRay += sphereMarch;
    }
}

Here’s some screenshots that show how the depth texture is not lining up with the output:

Any help is greatly appreciated!