Hi there,
I´m trying to render a sphere by some simple raytracing inside a cube mesh using a surface shader and shade the sphere correctly. This is the shader I build for this:
Shader "Custom/SurfaceSDF"
{
SubShader
{
Tags {"RenderType" = "Opaque" "Queue" = "Geometry"}
LOD 200
CGPROGRAM
#pragma surface surf NoLighting
#pragma target 3.0
struct Input
{
float3 worldPos;
};
UNITY_INSTANCING_BUFFER_START(Props)
UNITY_INSTANCING_BUFFER_END(Props)
fixed4 LightingNoLighting(SurfaceOutput s, fixed3 lightDir, fixed atten)
{
return fixed4(s.Albedo, s.Alpha);
}
float intersect(in float3 ro, in float3 rd, in float3 ce, in float ra)
{
float3 oc = ro - ce;
float b = dot(oc, rd);
float c = dot(oc, oc) - ra * ra;
float h = b * b - c;
if (h < 0.0) return -1.0; // no intersection
h = sqrt(h);
return -b - h;
}
void surf (Input IN, inout SurfaceOutput o)
{
float3 rd = normalize(IN.worldPos - _WorldSpaceCameraPos);
float d = intersect(_WorldSpaceCameraPos, rd, float3(0, 0, 0), 1);
clip(d);
float3 hit = _WorldSpaceCameraPos + d * rd;
float3 norm = normalize(mul(unity_WorldToObject, hit - float3(0, 0, 0)));
o.Normal = norm;
o.Albedo = o.Normal*0.5 + 0.5;
o.Alpha = 1;
}
ENDCG
}
FallBack "Diffuse"
}
In general, this works and gives a correctly rendered sphere with correct normals when I use the NoLighting light model as you can see in the first image. But when I use the Lambert or Standard lighting model, the edges of the Cube give strange shading effects, as you can see in the second image.
So my question is, if the underlying mesh somehow has an effect on the lighting and not only the computed normals? If yes how could I resolve this issue so that the sphere is shaded correctly?
Thanks for your help in advance!