I have this noise post-processing shader which distorts the player's view when they go underwater. It had been working for a while, but it suddenly stopped working in the WebGL build — it still works in the editor.
shader code
// Upgrade NOTE: replaced 'mul(UNITY_MATRIX_MVP,*)' with 'UnityObjectToClipPos(*)'
Shader "MARIS/NoiseDistortion" {
    Properties
    {
        _MainTex("Texture", 2D) = "white" {}
        // Noise parameters are driven from script (material.SetFloat) —
        // uncomment these to expose them in the material inspector instead.
        //_NoiseScale("Noise Scale", float) = 1
        //_NoiseFrequency("Noise Frequency", float) = 1
        //_NoiseSpeed("Noise Speed", float) = 1
        //_PixelOffset("Pixel Offset", float) = 0.005
    }
    SubShader
    {
        // Full-screen post effect: no culling, no depth write, draw regardless of depth.
        Cull Off ZWrite Off ZTest Always
        // NOTE(review): this pass uses CGPROGRAM + UnityCG.cginc (built-in
        // pipeline style); the URP tag below does not convert it into a URP
        // shader. Verify the project's renderer actually executes this pass.
        Tags { "RenderPipeline" = "UniversalRenderPipeline" "RenderType" = "Opaque" }
        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"
            #include "noiseSimplex.cginc"

            #define M_PI 3.1415926535897932384626433832795

            // Set from C# via material.SetFloat — see commented Properties above.
            uniform float _NoiseScale, _NoiseFrequency, _NoiseSpeed, _PixelOffset;
            sampler2D _MainTex;

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
                float4 scrPos : TEXCOORD1; // homogeneous screen position for tex2Dproj
            };

            // Standard blit vertex: clip-space position plus the screen-space
            // coordinate used to sample _MainTex projectively in frag.
            v2f vert(appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.scrPos = ComputeScreenPos(o.vertex);
                o.uv = v.uv;
                return o;
            }

            // Offsets the screen sample along a direction derived from 3D
            // simplex noise (xy = screen position, z = scrolled by time) to
            // fake underwater refraction.
            //
            // FIX: the fragment output semantic must be SV_Target, not the
            // legacy COLOR semantic. The editor's D3D compiler accepts COLOR,
            // but the GLSL ES cross-compilation used for WebGL rejects it, so
            // the build falls back to the magenta error shader — the "pink
            // screen" seen only in the WebGL build.
            fixed4 frag(v2f i) : SV_Target
            {
                float3 sPos = float3(i.scrPos.x, i.scrPos.y, 0) * _NoiseFrequency;
                sPos.z += _Time.x * _NoiseSpeed;
                // snoise returns [-1, 1]; remap to [0, 1] before scaling.
                float noise = _NoiseScale * ((snoise(sPos) + 1) / 2);
                // Turn the scalar noise into a unit direction in the screen plane.
                float4 noiseToDir = float4(cos(noise * M_PI * 2), sin(noise * M_PI * 2), 0, 0);
                // Nudge the projective sample position by a fixed pixel offset.
                float4 pos = i.scrPos + normalize(noiseToDir) * _PixelOffset;
                fixed4 col = tex2Dproj(_MainTex, pos);
                return col;
            }
            ENDCG
        }
    }
}
This is what it looks like in the editor, but in WebGL it just gives me a pink screen.
editor screen shot
Using Unity 2022.3.10f with URP 14.0.8
Anyone know what I am doing wrong?