I came across this intriguing method for rendering a sphere using a single quad. This is useful for rendering a very large number of spheres, for example ones coming from a particle system.
http://www.sunsetlakesoftware.com/2011/05/08/enhancing-molecules-using-opengl-es-20
I got it working perfectly in forward rendering, but in deferred rendering the lighting screws up. I get a dark line on the side of the sphere. The closer the sphere is located to the screen edges the worse the effect gets. I just don’t understand why…
I am suspecting that something fishy is happening after I pass the world normals through the g-buffer to Unity’s lighting system. I also don’t quite understand the size difference to the standard (triangulated) sphere.
I have attached an example package. Shader model 4.0 is required. Not sure it will fly on OSX, I made it on Windows. It does this:
- Attaches a CommandBuffer to each camera requesting a single vertex to be rendered with a shader.
- In the shader, the vertex is expanded to a quad in the geometry function.
- In the shader's fragment function, sphere normals are computed and output to the g-buffer.
(More comments can be found in the code)
Any hints are appreciated!
Carl Emil
// Geometry stage: expands each input point into a camera-facing quad
// (emitted as a 4-vertex triangle strip). quadpos carries the corner's
// position in [-1,1]^2 so the fragment stage can shade the quad as a sphere.
[maxvertexcount(4)]
void geom( point VS_OUT p[1], uint id : SV_PrimitiveID, inout TriangleStream<GS_OUT> triStream )
{
	// Hard-coded world origin while testing.
	float4 worldPos = float4( 0, 0, 0, 1 );
	float4 centerClip = mul( UNITY_MATRIX_VP, worldPos );

	// Half-size of the quad in clip space, corrected for the screen aspect ratio.
	float2 halfSize = 1;
	halfSize.x *= _ScreenParams.y / _ScreenParams.x;

	// Corner signs in triangle-strip order. The clip-space y offset uses the
	// opposite sign of quadpos.y (same pairing as the original unrolled code).
	static const float2 corners[4] =
	{
		float2( 1, 1 ), float2( 1, -1 ), float2( -1, 1 ), float2( -1, -1 )
	};

	GS_OUT o;
	[unroll]
	for( int c = 0; c < 4; c++ )
	{
		float2 q = corners[c];
		o.vertex = float4( centerClip.xy + float2( q.x * halfSize.x, -q.y * halfSize.y ), centerClip.zw );
		o.quadpos = q;
		triStream.Append( o );
	}
}
// Deferred fragment stage: shades the screen-aligned quad as a sphere by
// deriving a per-pixel normal, then packs the Unity standard g-buffer.
//
// i           - interpolated quad data; quadpos spans [-1,1]^2 across the quad.
// outGBuffer0 - RT0: diffuse (rgb), occlusion (a)
// outGBuffer1 - RT1: specular (rgb), smoothness (a)
// outGBuffer2 - RT2: world-space normal (rgb), --unused-- (a)
// outEmission - RT3: emission (rgb), --unused-- (a)
void fragDeferred
(
	GS_OUT i,
	out half4 outGBuffer0 : SV_Target0,
	out half4 outGBuffer1 : SV_Target1,
	out half4 outGBuffer2 : SV_Target2,
	out half4 outEmission : SV_Target3 // RT3: emission (rgb), --unused-- (a)
){
	// Reject fragments outside the unit circle inscribed in the quad.
	float sqrDist = dot( i.quadpos, i.quadpos ); // squared distance from quad center
	if( sqrDist > 1 ) discard;

	// Sphere surface height above the quad plane: x^2 + y^2 + z^2 = 1.
	// The original computed sqrt(sqrDist) and then squared it again; the
	// round-trip is redundant, so the squared distance is used directly.
	float depth = sqrt( 1 - sqrDist );

	// Unit-length view-space normal (by construction: |quadpos|^2 + depth^2 = 1),
	// rotated into world space. normalize() would only matter if _InvViewMatrix
	// carried scale, which a view matrix shouldn't.
	// NOTE(review): a camera-facing billboard normal is only an approximation —
	// away from the screen center the view ray is not parallel to the view axis,
	// which could explain the edge artifacts seen in deferred lighting; an exact
	// per-pixel ray-sphere intersection would avoid that. TODO confirm.
	float3 screenNormal = float3( i.quadpos, depth );
	float3 worldNormal = mul( (float3x3) _InvViewMatrix, screenNormal );

	// Populate UnityStandardData and pack it via UnityStandardDataToGbuffer.
	UnityStandardData data;
	data.diffuseColor = 1;
	data.occlusion = 1;
	data.specularColor = _Specular.xxx;
	data.smoothness = _Smoothness;
	data.normalWorld = worldNormal;
	UnityStandardDataToGbuffer( data, outGBuffer0, outGBuffer1, outGBuffer2 );

	// No emission. In non-HDR (LDR) deferred, Unity stores emission as exp2(-x),
	// so zero emission is encoded as 1.
	outEmission = half4( 0, 0, 0, 1 );
	#ifndef UNITY_HDR_ON
	outEmission.rgb = exp2( -outEmission.rgb );
	#endif
}
