EDIT: Found a better approach than the one described below in my fifth post (https://forum.unity.com/threads/enabling-post-processing-removes-benefits-from-msaa-and-render-scale.1461779/#post-9181940)
So, last update from me: I implemented the two-camera approach in my real project. I decided to use a
ScriptableRendererFeature for rendering the post-processed texture instead of a Canvas/RawImage. While this makes no difference in FPS, I personally find it cleaner to render the post-processing texture directly this way. To get this to work, two different 2D renderers are required:

The MainRenderer2D is used for the main player camera, while the EffectsRenderer2D is used for the effects camera, which is a child of the main player camera using the same transform position and orthographic size to ensure it always has the same scene view. The EffectsRenderer2D uses the customized UberPost shader which allows preserving the alpha, as mentioned in my last post:

(The inspector has to be switched to Debug mode to show the shader list above)
The SH_AlphaUberPost_Default shader can be constructed by following the thread below. It should be reconstructed (rather than copied once) because the original UberPost.shader may change between URP versions, even minor ones:
Also, don't forget to compare and update the shader whenever the URP version changes.
The MainRenderer2D has the DetachedEffectsRenderPassFeature added:

The material M_EffectsRenderPass is using the shader code I posted in my previous post. This is the feature code:
DetachedEffectsRenderPassFeature
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
namespace My.Game.Scripts.Graphics
{
/// <summary>
/// Render pass feature for injecting effects rendered by a separate effects camera
/// (recorded into an off-screen effects render texture) back into the pipeline.
/// </summary>
/// <remarks>
/// This has been implemented for rendering Post Processing effects without harming
/// MSAA and render scale, because otherwise pixel flickering effects may re-occur.
/// </remarks>
public class DetachedEffectsRenderPassFeature : ScriptableRendererFeature
{
    /// <summary>
    /// The material used to composite the effects texture onto the camera color target.
    /// Its main texture is driven by this feature and points at the effects render texture.
    /// </summary>
    public Material Material;

    private bool _isEnabled;
    private bool _isReady;
    private Camera _registeredEffectsCamera;
    private readonly EffectsRenderPass _renderPass;
    private RenderTexture _renderTexture;

    /// <inheritdoc />
    public DetachedEffectsRenderPassFeature()
    {
        this._renderPass = new()
        {
            renderPassEvent = RenderPassEvent.AfterRendering,
        };
        this.SetEnabled(false, true);
    }

    /// <inheritdoc />
    protected override void Dispose(bool disposing)
    {
        this.ReleaseEffectsTexture();
        base.Dispose(disposing);
    }

    /// <summary>
    /// Gets or sets a value indicating whether the effects render pass feature is enabled or not.
    /// Enabling creates the effects texture; disabling releases it.
    /// </summary>
    public bool IsEnabled
    {
        get => this._isEnabled;
        set =>
            // Force always because of issues how serialization and deserialization works on RenderPassFeatures
            this.SetEnabled(value, true);
    }

    /// <summary>
    /// Enqueues the effects render pass once per camera, but only while the feature is
    /// enabled and the effects texture has been created.
    /// </summary>
    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        if (this.IsEnabled && this._isReady)
        {
            renderer.EnqueuePass(this._renderPass);
        }
    }

    /// <inheritdoc />
    public override void Create()
    {
        if (this.Material == null)
        {
            Debug.LogWarning($"No material specified for '{nameof(DetachedEffectsRenderPassFeature)}'.");
            return;
        }

        this._renderPass.Material = this.Material;
        this._renderPass.RenderTexture = this._renderTexture;
    }

    /// <summary>
    /// (Re)creates the screen-sized effects render texture and wires it to the render pass,
    /// the composite material and the registered effects camera.
    /// </summary>
    private void CreateEffectsTexture()
    {
        if (this._renderTexture != null)
        {
            if (this._registeredEffectsCamera)
            {
                this._registeredEffectsCamera.targetTexture = null;
            }

            this._renderTexture.Release();
            // BUGFIX: also destroy the RenderTexture object itself; Release() only frees
            // the GPU resource, so recreating without Destroy leaks a wrapper object each time.
            CoreUtils.Destroy(this._renderTexture);
            this._renderTexture = null;
        }

        // EDIT: Do not increase the Render Scale, because it will be increased automatically
        //var renderScale = UniversalRenderPipeline.asset.renderScale;
        //this._renderTexture = new RenderTexture(Mathf.CeilToInt(Screen.width * renderScale), Mathf.CeilToInt(Screen.height * renderScale), ...
        // BUGFIX: the previous code passed Mathf.CeilToInt(Screen.height) as the depth-buffer
        // argument, which is not a valid depth bit count (leftover from the render-scale
        // variant above). No depth buffer is needed here, which the explicit
        // depth-stencil format of GraphicsFormat.None expresses directly.
        this._renderTexture = new RenderTexture(Screen.width, Screen.height,
            GraphicsFormat.R16G16B16A16_SFloat, GraphicsFormat.None)
        {
            anisoLevel = 0,
            filterMode = FilterMode.Point, // EDIT: When desired other filter modes could be used
            wrapMode = TextureWrapMode.Clamp,
            dimension = TextureDimension.Tex2D,
            antiAliasing = 1, // EDIT: When desired MSAA can be used
            useDynamicScale = false,
            useMipMap = false,
            autoGenerateMips = false,
            enableRandomWrite = false,
            memorylessMode = RenderTextureMemoryless.Depth,
        };

        // Assign textures and configure appropriately
        this._renderPass.RenderTexture = this._renderTexture;
        if (this.Material != null)
        {
            this.Material.mainTexture = this._renderTexture;
        }

        // Update camera target texture
        if (this._registeredEffectsCamera)
        {
            this._registeredEffectsCamera.targetTexture = this._renderTexture;
        }

        this._isReady = true;
    }

    /// <summary>
    /// Registers a camera which records onto the effects texture.
    /// </summary>
    /// <param name="camera">The camera which records to the effects texture.</param>
    public void RegisterTargetCamera(Camera camera)
    {
        if (this._registeredEffectsCamera != camera)
        {
            // Detach the previously registered camera before switching.
            if (this._registeredEffectsCamera)
            {
                this._registeredEffectsCamera.targetTexture = null;
            }

            this._registeredEffectsCamera = camera;
            if (this._renderTexture != null)
            {
                this._registeredEffectsCamera.targetTexture = this._renderTexture;
            }
            else
            {
                this._registeredEffectsCamera.targetTexture = null;
            }
        }
    }

    /// <summary>
    /// Releases the effects render texture and detaches it from the camera,
    /// the composite material and the render pass. Safe to call repeatedly.
    /// </summary>
    private void ReleaseEffectsTexture()
    {
        if (this._isReady)
        {
            if (this._registeredEffectsCamera)
            {
                this._registeredEffectsCamera.targetTexture = null;
            }

            this._renderTexture.Release();
            // BUGFIX: destroy the RenderTexture object to avoid leaking it (see CreateEffectsTexture).
            CoreUtils.Destroy(this._renderTexture);
            this._renderTexture = null;

            // BUGFIX: Material may legitimately be unassigned (Create() only warns),
            // so guard before clearing its main texture.
            if (this.Material != null)
            {
                this.Material.mainTexture = null;
            }

            this._renderPass.RenderTexture = null;
            this._isReady = false;
        }
    }

    /// <summary>
    /// Switches the feature on or off, creating or releasing the effects texture accordingly.
    /// </summary>
    /// <param name="isEnabled">The desired enabled state.</param>
    /// <param name="forced">Apply even when the state appears unchanged (serialization quirks).</param>
    private void SetEnabled(bool isEnabled, bool forced)
    {
        if (this._isEnabled != isEnabled || forced)
        {
            if (isEnabled)
            {
                this.CreateEffectsTexture();
            }
            else
            {
                this.ReleaseEffectsTexture();
            }

            this._isEnabled = isEnabled;
        }
    }

    /// <summary>
    /// Render pass which blits the effects render texture over the camera color target
    /// using the composite material (preserving alpha).
    /// </summary>
    private class EffectsRenderPass : ScriptableRenderPass
    {
        // Composite material; assigned by the owning feature.
        public Material Material;
        // Off-screen texture the effects camera renders into; assigned by the owning feature.
        public RenderTexture RenderTexture;
        private RenderTargetIdentifier _currentTarget;

        /// <inheritdoc />
        public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor)
        {
        }

        // Here you can implement the rendering logic.
        // Use <c>ScriptableRenderContext</c> to issue drawing commands or execute command buffers
        // https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.html
        // You don't have to call ScriptableRenderContext.submit, the render pipeline will call it at specific points in the pipeline.
        public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
        {
            // Skip rendering if the render texture is null or not created
            if (this.RenderTexture == null || !this.RenderTexture.IsCreated() || this.Material == null)
            {
                return;
            }

            var cmd = CommandBufferPool.Get("RenderDetachedPostProcessingEffects");
            cmd.Blit(this.RenderTexture, renderingData.cameraData.renderer.cameraColorTargetHandle, this.Material);
            context.ExecuteCommandBuffer(cmd);
            CommandBufferPool.Release(cmd);
        }

        // Cleanup any allocated resources that were created during the execution of this render pass.
        public override void OnCameraCleanup(CommandBuffer cmd)
        {
        }

        // This method is called before executing the render pass.
        // It can be used to configure render targets and their clear state. Also to create temporary render target textures.
        // When empty this render pass will render to the active camera render target.
        // You should never call CommandBuffer.SetRenderTarget. Instead call <c>ConfigureTarget</c> and <c>ConfigureClear</c>.
        // The render pipeline will ensure target setup and clearing happens in a performant manner.
        public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
        {
        }
    }
}
}
Last but not least I implemented a GraphicsController-MonoBehavior for enabling/disabling the DetachedEffectsRenderPassFeature and adjusting the cameras:
GraphicsController
using System.Linq;
using Microsoft.Extensions.Logging;
using My.Game.Scripts.Dependencies;
using My.Game.Scripts.Project;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.Serialization;
using Zenject;
namespace My.Game.Scripts.Graphics
{
/// <summary>
/// Scene-level controller which toggles the <see cref="DetachedEffectsRenderPassFeature"/>
/// and adjusts the player/effects cameras according to the configured quality settings.
/// </summary>
public class GraphicsController : MonoBehaviour
{
    [Tooltip("The effects renderer 2D used for rendering post processing effects only when configured.")]
    public Renderer2DData EffectsRenderer2D = null!;

    [FormerlySerializedAs("IsSeparatingEffects")]
    [Tooltip("The post processing effects are processed detached from the main camera.")]
    public bool IsDetachedPostProcessingEffects;

    [Tooltip("The main renderer 2D used for rendering all kinds of game objects.")]
    public Renderer2DData MainRenderer2D = null!;

    [Inject(Id = DependencyIdentifiers.SceneHierarchy.Cameras.Effects)]
    private Camera _effectsCamera;

    [Inject]
    private ILogger<GraphicsController> _logger;

    [Inject(Id = DependencyIdentifiers.SceneHierarchy.Cameras.Player)]
    private Camera _playerCamera;

    // Cached on Start; null when the active pipeline is not URP.
    private UniversalRenderPipelineAsset? _urpAsset;

    /// <summary>
    /// Validates the configured renderers, caches the active URP asset and applies
    /// the initial graphic settings. Logs a warning and bails on any misconfiguration.
    /// </summary>
    private void Start()
    {
        if (this.MainRenderer2D == null)
        {
            this._logger.LogWarning("No main renderer 2D set, '{Name}' not working correctly", nameof(GraphicsController));
            return;
        }

        if (this.EffectsRenderer2D == null)
        {
            this._logger.LogWarning("No effects renderer 2D set, '{Name}' not working correctly", nameof(GraphicsController));
            return;
        }

        var urpAsset = GraphicsSettings.currentRenderPipeline as UniversalRenderPipelineAsset;
        if (urpAsset != null)
        {
            this._urpAsset = urpAsset;
            this.UpdateGraphicSettings();
        }
        else
        {
            this._logger.LogWarning("URP asset not found, '{Name}' not working correctly", nameof(GraphicsController));
        }
    }

    /// <summary>
    /// Re-applies all graphic settings. Call after changing the quality configuration at runtime.
    /// </summary>
    public void UpdateGraphicSettings()
    {
        if (this._urpAsset != null)
        {
            this.UpdateDetachedPostProcessingEffects();
        }
        else
        {
            this._logger.LogWarning("URP asset not found, graphic settings cannot be updated.");
        }
    }

    /// <summary>
    /// Enables or disables the detached post-processing pipeline: adjusts the player
    /// camera culling mask and post-processing flag, toggles the effects camera and
    /// registers it with the render pass feature.
    /// </summary>
    private void UpdateDetachedPostProcessingEffects()
    {
        // BUGFIX: Single() throws InvalidOperationException when the feature is missing
        // (or added twice) on the renderer asset; log a warning instead, consistent with
        // the misconfiguration handling in Start().
        var effectsRenderPassFeature = this.MainRenderer2D.rendererFeatures
            .OfType<DetachedEffectsRenderPassFeature>()
            .SingleOrDefault();
        if (effectsRenderPassFeature == null)
        {
            this._logger.LogWarning(
                "Expected exactly one '{Feature}' on the main renderer 2D, detached effects cannot be updated.",
                nameof(DetachedEffectsRenderPassFeature));
            return;
        }

        var isDetachedPostProcessingEffects = this.IsDetachedPostProcessingEffects;

        // When effect separation is enabled the player camera must not render the
        // effects layer itself; the effects camera records that layer instead.
        // unchecked: the layer-mask flags enum may set the sign bit when cast to int.
        if (isDetachedPostProcessingEffects)
        {
            unchecked
            {
                this._playerCamera.cullingMask = (int)(UnityUserLayers.All & ~UnityUserLayers.Effects);
            }
        }
        else
        {
            unchecked
            {
                this._playerCamera.cullingMask = (int)UnityUserLayers.All;
            }
        }

        // Post processing on the player camera and the detached pipeline are mutually exclusive.
        this._playerCamera.GetUniversalAdditionalCameraData().renderPostProcessing = !isDetachedPostProcessingEffects;

        this._logger.LogInformation("Setting enabled of effects render pass feature to '{EnabledState}'", isDetachedPostProcessingEffects);
        this._effectsCamera.gameObject.SetActive(isDetachedPostProcessingEffects);
        effectsRenderPassFeature.RegisterTargetCamera(this._effectsCamera);
        effectsRenderPassFeature.IsEnabled = isDetachedPostProcessingEffects;
    }
}
}
I made a performance test in my real project, and the approach results in a drop from 530 to 450 FPS. That is roughly a 15% loss, which I find acceptable. Without hard evidence, I would guess that the lower the frame rate, the smaller the impact of this approach. I made this technique a graphical quality setting in my game and assigned it to the upper quality presets, so the player can decide whether to use it or not. So in the end I am satisfied with this solution; most importantly, clean camera movement quality can be achieved with the upper quality presets.