I'm creating a procedural texture with something like:
int texSize = 20;
Texture2D tex = new Texture2D (texSize, texSize, TextureFormat.RGBA32, false, false);
tex.filterMode = FilterMode.Point;
for (int i = 0; i < tex.width; i++)
{
for(int j = 0; j < tex.height; j++)
{
if(i == 2 && j == 3)
tex.SetPixel(i, j, new Color(1.0f, 0.0f, 0.0f));
if(i == 2 && j == 4)
tex.SetPixel(i, j, new Color(1.0f, 0.0f, 0.0f));
}
}
tex.Apply();
Then, in the shader, I want to sample a pixel from this texture:
Shader "Custom/Terrain"
{
Properties
{
_TexData ("HexData", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 200
//Cull off
CGPROGRAM
#pragma surface surf Lambert finalcolor:mycolor
#pragma target 3.0
sampler2D _TexData;
void surf(Input IN, inout SurfaceOutput o)
{
o.Albedo = grass;
o.Alpha = 1.0;
}
void mycolor (Input IN, SurfaceOutput o, inout fixed4 color)
{
color.rgb = tex2D(_TexData, float2( 3 / 20.0f, 4 / 20.0f));// returns correctly red
//or
color.rgb = tex2D(_TexData, float2( 3 / 20.0f, 5 / 20.0f));// returns white
}
ENDCG
}
FallBack "Diffuse"
}
I'm getting the right values when sampling at (3 / texSize, 4 / texSize), but wrong values for (3 / texSize, 5 / texSize).
I'm saving the texture to disk for testing, and it looks correct:
![]()
Is there a way to sample the texture by pixel, or to fix my pixel-to-texture-coordinate calculation?