I’m experimenting with some shader techniques, and I’ve run into something that I can’t make sense of.
In the code below I'm simply outputting `_WorldSpaceCameraPos` as a color. As I understand it, that value is a per-camera uniform, so it should be identical for every fragment in a single draw.
In the Game view it works fine: when I move the camera around, the color changes with the camera position and is uniform across all fragments.
In the Scene view, though, there's all kinds of weirdness. From certain viewing angles more than one color is being rendered at once. If I'm outputting the world-space camera position for every fragment, how is it possible for any two fragments to end up with different values?
Shader "GunWhale/Background/Cloud"
{
Properties
{
[MainTexture] _BaseMap("Albedo", 2D) = "white" {}
}
SubShader
{
Tags{"RenderType" = "Opaque"}
LOD 300
Pass
{
Name "ForwardLit"
Tags{"LightMode" = "UniversalForward"}
ZWrite On
HLSLPROGRAM
#pragma target 4.5
#pragma vertex CloudVertex
#pragma fragment CloudFragment
#include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl"
struct Attributes
{
float4 positionOS : POSITION;
float2 uv : TEXCOORD0;
};
struct Varyings
{
float4 positionCS : SV_POSITION;
float2 uv : TEXCOORD2;
};
Varyings CloudVertex (Attributes input)
{
Varyings output;
VertexPositionInputs vertexInput = GetVertexPositionInputs(input.positionOS.xyz);
output.positionCS = vertexInput.positionCS;
output.uv = input.uv;
return output;
}
half4 CloudFragment(Varyings input) : SV_Target
{
half4 col = half4(_WorldSpaceCameraPos, 1);
return col;
}
ENDHLSL
}
}
}
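
If it helps narrow things down, here's a diagnostic variant of the fragment function (a sketch of my own, not part of the shader above). My thinking: the displayed color clamps each channel to [0, 1], and the camera usually sits at coordinates well outside that range, so outputting frac() of the position should make small per-fragment differences visible instead of everything saturating to the same color:

half4 CloudFragment(Varyings input) : SV_Target
{
    // Visualize only the fractional part of the camera position, so that
    // coordinates outside [0, 1] don't all clamp to one saturated color.
    return half4(frac(_WorldSpaceCameraPos), 1);
}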