I’m adapting the default Unity UI shader and want to use tex.Load to get a specific pixel. Simply saying someTex.Load(x,y,z) doesn’t work and I don’t quite understand why.
Here’s what I’ve got:
Shader "UI/TexLoad Wrong"
{
Properties
{
[PerRendererData] _MainTex ("Main", 2D) = "white" {}
_ColorScheme ("Color Scheme", 2D) = "white" {}
_StencilComp ("Stencil Comparison", Float) = 8
_Stencil ("Stencil ID", Float) = 0
_StencilOp ("Stencil Operation", Float) = 0
_StencilWriteMask ("Stencil Write Mask", Float) = 255
_StencilReadMask ("Stencil Read Mask", Float) = 255
_ColorMask ("Color Mask", Float) = 15
[Toggle(UNITY_UI_ALPHACLIP)] _UseUIAlphaClip ("Use Alpha Clip", Float) = 0
// do noise
}
SubShader
{
Tags
{
"Queue"="Transparent"
"IgnoreProjector"="True"
"RenderType"="Transparent"
"PreviewType"="Plane"
"CanUseSpriteAtlas"="True"
}
Stencil
{
Ref [_Stencil]
Comp [_StencilComp]
Pass [_StencilOp]
ReadMask [_StencilReadMask]
WriteMask [_StencilWriteMask]
}
Cull Off
Lighting Off
ZWrite Off
ZTest [unity_GUIZTestMode]
Blend SrcAlpha OneMinusSrcAlpha
ColorMask [_ColorMask]
Pass
{
Name "Default"
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma target 2.0
#include "UnityCG.cginc"
#include "UnityUI.cginc"
#pragma multi_compile_local _ UNITY_UI_CLIP_RECT
#pragma multi_compile_local _ UNITY_UI_ALPHACLIP
struct appdata_t
{
float4 vertex : POSITION;
float4 color : COLOR;
float2 texcoord : TEXCOORD0;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float4 vertex : SV_POSITION;
fixed4 color : COLOR;
float2 texcoord : TEXCOORD0;
float4 worldPos : TEXCOORD1;
float4 somePixel : TEXCOORD2;
UNITY_VERTEX_OUTPUT_STEREO
};
sampler2D _ColorScheme;
sampler2D _MainTex;
fixed4 _TextureSampleAdd;
float4 _ClipRect;
float4 _MainTex_ST;
v2f vert(appdata_t v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
o.worldPos = v.vertex;
o.vertex = UnityObjectToClipPos(o.worldPos);
o.texcoord = TRANSFORM_TEX(v.texcoord, _MainTex);
o.color = v.color;
// *** LOAD THE CORRECT PIXEL
int x = 1; // get this
int y = 1; // get that
o.somePixel = _ColorScheme.Load(int3(x, y, 0)); // THIS IS WRONG!
return o;
}
fixed4 frag(v2f i) : SV_Target
{
half4 main = (tex2D(_MainTex, i.texcoord) + _TextureSampleAdd);
// TODO apply somePixel
#ifdef UNITY_UI_CLIP_RECT
color.a *= UnityGet2DClipping(i.worldPos.xy, _ClipRect);
#endif
#ifdef UNITY_UI_ALPHACLIP
clip (color.a - 0.001);
#endif
return color;
}
ENDCG
}
}
}
The issue is #80 ( sampler2D _ColorScheme; ) combined with line #99 ( o.somePixel = _ColorScheme.Load(int3(x, y, 0)); ).
In another shader, I could use TEXTURE2D(_ColorScheme); then could call _ColorScheme.Load(x,y,z) but that’s no good here, so I’m wondering what I need to do to access the texture in this context to be able to use tex.Load.