So I have this code:
using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
public class QuantizeRenderFeature : ScriptableRendererFeature
{
private class QuantizeRenderPass : ScriptableRenderPass, IDisposable
{
private Material _mat;
private RTHandle _handle;
private RTHandle _target;
private RTHandle _colorCopy;
private static readonly MaterialPropertyBlock s_propertyBlock = new MaterialPropertyBlock();
private static readonly int s_blitTexture = Shader.PropertyToID("_BlitTexture");
private static readonly int s_blitScaleBias = Shader.PropertyToID("_BlitScaleBias");
public QuantizeRenderPass()
{
_mat = CoreUtils.CreateEngineMaterial("Shader Graphs/QuantizeFullscreen");
profilingSampler = new ProfilingSampler("QuantizeRenderPass");
renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing;
}
public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
{
_target = RTHandles.Alloc(cameraTextureDescriptor.width, cameraTextureDescriptor.height,
colorFormat: cameraTextureDescriptor.graphicsFormat,
depthBufferBits: 0,
name: "_QuantizeRenderPassTexture"
);
}
public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
{
var desc = renderingData.cameraData.cameraTargetDescriptor;
desc.depthBufferBits = 0;
RenderingUtils.ReAllocateIfNeeded(ref _handle, desc, FilterMode.Point, TextureWrapMode.Clamp,
name: "_QuantizeRenderPass");
}
public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
{
ref var cameraData = ref renderingData.cameraData;
var cmd = CommandBufferPool.Get("_QuantizeRenderPass");
using (new ProfilingScope(cmd, profilingSampler))
{
Blitter.BlitCameraTexture(cmd, cameraData.renderer.cameraColorTargetHandle, _target);
CoreUtils.SetRenderTarget(cmd, cameraData.renderer.cameraColorTargetHandle);
s_propertyBlock.Clear();
s_propertyBlock.SetTexture(s_blitTexture, _target);
s_propertyBlock.SetVector(s_blitScaleBias, new Vector4(1, 1, 0, 0));
cmd.DrawProcedural(Matrix4x4.identity, _mat, 0, MeshTopology.Triangles, 3, 1, s_propertyBlock);
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
}
}
public override void OnFinishCameraStackRendering(CommandBuffer cmd) { _target.Release(); }
public void Dispose()
{
_handle?.Release();
CoreUtils.Destroy(_mat);
}
}
private QuantizeRenderPass _pass;
public override void Create() { _pass = new QuantizeRenderPass(); }
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
if (disposing)
_pass.Dispose();
}
public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
{
renderer.EnqueuePass(_pass);
}
}
But for some reason, in the editor, the screen is black. While in play mode it’s fine. I can see the effect in edit mode if I get rid of line 64, the line that releases the _target RTHandle. Why tf?
I’ve also tried to look in the frame debugger, but one step it’s fine the next it’s black. Weirdly enough, the frame debugger still shows everything black after my pass in play mode even though I can see it displayed.
Please help. What am I doing wrong? I’m just trying to turn my full screen shader into a post processing effect usable by a global volume, I feel like it shouldn’t be so hard.