I’m working with in VR/AR and and streaming point cloud data to the device. I have a geometry shader which takes each vertex and draws a white triangle around it. In the editor this works well, however on the device it’s only rendered on the left display. The rest of the scene is rendered left and right. Unity is using Single Pass Instanced Rendering Mode.
I initially tried flowing Unity’s guide however that didn’t help get the right display rendering.
I’ve also followed the advice on this thread, however, it doesn’t seem to help.
I’m basing my shader on the following source, however this is not set up for stereo.
Any help would be great!
Am I not handeling the instancing correctly?
Shader "Custom/PointCloud" {
Properties{
_Radius("Sphere Radius", float) = 0.005
}
SubShader{
LOD 200
Tags { "RenderType" = "Opaque" }
Pass
{
Cull Off
CGPROGRAM
#pragma vertex vertex_shader
#pragma geometry geometry_shader
#pragma fragment fragment_shader
#pragma target 5.0
#pragma only_renderers d3d11
#include "UnityCG.cginc"
// Variables
float _Radius;
// VERTEX DATA
struct appData {
float4 pos : POSITION;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
// GEOMETRY DATA
struct v2g {
float4 pos : SV_POSITION;
float4 color : COLOR0;
float3 normal : NORMAL;
float r : TEXCOORD0;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
// FRAGMENT DATA
struct g2f {
float4 pos : POSITION;
float4 color : COLOR0;
float3 normal : NORMAL;
UNITY_VERTEX_OUTPUT_STEREO
};
// FUNCTION: Calculate "random" number
float rand(float3 p) {
return frac(sin(dot(p.xyz, float3(12.9898, 78.233, 45.5432))) * 43758.5453);
}
// FUNCTION: Calculate rotation
float2x2 rotate2d(float a) {
float s = sin(a);
float c = cos(a);
return float2x2(c, -s, s, c);
}
// VERTEX SHADER: computes normal wrt camera
v2g vertex_shader(appData v) {
// Output struct to geometry shader
v2g o;
// For single passed stereo rendering
UNITY_SETUP_INSTANCE_ID(v);
UNITY_TRANSFER_INSTANCE_ID(v, o);
// Output clipping position
o.pos = UnityObjectToClipPos(v.pos);
// Distance from vertex to camera
float distance = length(WorldSpaceViewDir(v.pos));
// White
o.color = float4(1, 1, 1, 1);
// Normal facing camera
o.normal = ObjSpaceViewDir(v.pos);
// Calc random value based on object space pos
o.r = rand(v.pos);
return o;
}
// GEOMETRY SHADER: Creates an equilateral triangle centered at vertex
[maxvertexcount(3)]
void geometry_shader(point v2g i[1], inout TriangleStream<g2f> triangleStream)
{
UNITY_SETUP_INSTANCE_ID(i);
// Dimention of geometry
float2 dim = float2(_Radius, _Radius);
// Create equilateral triangle
float2 p[3];
p[0] = float2(-dim.x, dim.y * .57735026919);
p[1] = float2(0., -dim.y * 1.15470053838);
p[2] = float2(dim.x, dim.y * .57735026919);
// Get the rotation from random vert input
float2x2 r = rotate2d(i[0].r * 3.14159);
// Output struct
g2f o;
o.color = i[0].color;
o.normal = i[0].normal;
// Update geometry
[unroll]
for (int idx = 0; idx < 3; idx++) {
p[idx] = mul(r,p[idx]); // apply rotation
p[idx].x *= _ScreenParams.y / _ScreenParams.x; // make square
o.pos = i[0].pos + float4(p[idx],0,0) / 2.;
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
triangleStream.Append(o);
}
}
// FRAGMENT SHADER
float4 fragment_shader(g2f i) : COLOR
{
// Use vertex color
return i.color;
}
ENDCG
}
}
FallBack "Diffuse"
}