I’d like to implement a portal effect in VR. To do this, I’ve duplicated the XR Rig’s CenterEye Anchor camera to observe the other side of the portal and render to a RenderTexture whose dimensions are set programmatically to Screen.width and Screen.height. This works great in the Editor because there is no stereo rendering going on and the center camera is exactly what is used.
However, this obviously does not work when deployed to my Quest. I’m stumped as to how to proceed. I set Multiview as my rendering mode in the Oculus XR settings, which I believe is equivalent to Single Pass Stereo.
But how do I create cameras that duplicate the stereo view? How do I create the RenderTexture and have each eye render to the appropriate side? How do I even size that texture?
I can’t find any working examples on the forum.
EDIT:
Here’s what I tried just now:
- Modify my portal shader to accept two textures, left and right, in single stereo mode.
- Modify my portal script to disable the single camera and create two cameras and two render textures in stereo mode.
On device, it just renders black.
Shader
Shader "Custom/Portal"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_LeftEyeTexture ("Texture", 2D) = "white" {}
_RightEyeTexture("Texture", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float4 screenPos : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
float4 _MainTex_ST;
sampler2D _LeftEyeTexture;
sampler2D _RightEyeTexture;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.screenPos = ComputeScreenPos(o.vertex); // use the screen position coordinates of the portal to sample the render texture (which is our screen)
UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
fixed4 frag(v2f i) : SV_Target
{
float2 uv = i.screenPos.xy / i.screenPos.w; // clip space -> normalized texture (?)
uv = UnityStereoTransformScreenSpaceTex(uv);
// sample the texture
#if SINGLE_PASS_STEREO
fixed4 col = tex2D(_LeftEyeTexture, uv);
#else
fixed4 col = tex2D(_MainTex, uv);
#endif
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
return col;
}
ENDCG
}
}
}
Portal Script
using UnityEngine;
public class Portal : MonoBehaviour
{
[Tooltip("Camera observing the other side of the portal.")]
[SerializeField]
private Camera m_otherCamera;
[Tooltip("The other portal transform, which must be the equivalent transform to this portal's.")]
[SerializeField]
private Transform m_otherPortal;
private MeshRenderer m_ourPortalRenderer;
private void Update()
{
Vector3 userOffsetFromPortal = Camera.main.transform.position - transform.position;
m_otherCamera.transform.position = m_otherPortal.transform.position + userOffsetFromPortal;
float angularDifferenceBetweenPortalRotations = Quaternion.Angle(transform.rotation, m_otherPortal.rotation);
Quaternion portalRotationDelta = Quaternion.AngleAxis(angularDifferenceBetweenPortalRotations, Vector3.up);
Vector3 newCameraDirection = portalRotationDelta * Camera.main.transform.forward;
m_otherCamera.transform.rotation = Quaternion.LookRotation(newCameraDirection, Vector3.up);
}
private void Start()
{
if (m_otherCamera.targetTexture != null)
{
m_otherCamera.targetTexture.Release();
}
Debug.LogFormat("Stereo={0}", Camera.main.stereoEnabled);
if (!Camera.main.stereoEnabled)
{
m_otherCamera.targetTexture = new RenderTexture(Camera.main.pixelWidth, Camera.main.pixelHeight, 24);
m_ourPortalRenderer.material.mainTexture = m_otherCamera.targetTexture;
}
else
{
// Disable the camera and attach stereo cameras
m_otherCamera.enabled = false;
GameObject left = new GameObject("LeftEye");
left.transform.parent = m_otherCamera.transform;
left.tag = m_otherCamera.gameObject.tag;
//left.transform.localPosition = -Vector3.right * Camera.main.stereoSeparation;
GameObject right = new GameObject("RightEye");
right.transform.parent = m_otherCamera.transform;
right.tag = m_otherCamera.gameObject.tag;
//right.transform.localPosition = Vector3.right * Camera.main.stereoSeparation;
Camera leftCamera = left.AddComponent<Camera>();
Camera rightCamera = right.AddComponent<Camera>();
leftCamera.CopyFrom(m_otherCamera);
rightCamera.CopyFrom(m_otherCamera);
leftCamera.projectionMatrix = Camera.main.GetStereoProjectionMatrix(Camera.StereoscopicEye.Left);
rightCamera.projectionMatrix = Camera.main.GetStereoProjectionMatrix(Camera.StereoscopicEye.Right);
leftCamera.targetTexture = new RenderTexture(leftCamera.pixelWidth, leftCamera.pixelHeight, 24);
rightCamera.targetTexture = new RenderTexture(rightCamera.pixelWidth, rightCamera.pixelHeight, 24);
leftCamera.enabled = true;
rightCamera.enabled = true;
m_ourPortalRenderer.material.SetTexture("_LeftEyeTexture", leftCamera.targetTexture);
m_ourPortalRenderer.material.SetTexture("_RightEyeTexture", rightCamera.targetTexture);
}
}
private void Awake()
{
m_ourPortalRenderer = GetComponentInChildren<MeshRenderer>();
Debug.Assert(m_otherCamera != null);
Debug.Assert(m_otherPortal != null);
Debug.Assert(m_ourPortalRenderer != null);
}
}