Check this out! (I hope this helps — this was a challenge for us.)
Included are a script and a shader, from the blit project linked above and the ARCameraLighting project.
Using 2018.3.8f1
C# Code from @tdmowrer & from repo (GitHub - johnsietsma/ARCameraLighting: A demonstration of using the AR camera background for lighting a scene.)
using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.XR.ARFoundation;
[RequireComponent(typeof(ReflectionProbe))]
public class ARDeviceScreenReflections : MonoBehaviour
{
    [SerializeField]
    [Tooltip("The AR camera that renders the device camera feed.")]
    private Camera aRCamera = null;

    [SerializeField]
    [Tooltip("The camera background which controls the camera image.")]
    private ARCameraBackground aRCameraBackground = null;

    [SerializeField]
    [Tooltip("Skybox material driven by the captured camera image (expects a _WorldToCameraMatrix property; see the AR/ARSkybox shader).")]
    private Material skyboxMaterial = null;

    [SerializeField]
    [Tooltip("The RenderTexture to blit the camera image to.")]
    private RenderTexture renderTexture = null;

    /// <summary>True while the capture command buffers are installed on the AR camera.</summary>
    public bool IsCapturing { get { return isCapturing; } }

    // Skybox that was active before this component was enabled; restored in OnDisable.
    private Material pastSkyboxMaterial = null;

    private CommandBuffer m_blitCommandBuffer = null;
    private CommandBuffer m_releaseCommandBuffer = null;
    private bool isCapturing;

    private void OnEnable()
    {
        // BUG FIX: this setup originally lived in Awake(), so the command buffers
        // were removed in OnDisable() but never re-added on a later enable —
        // capture silently stopped after one disable/enable cycle. Installing
        // them from OnEnable() keeps capture working across cycles.
        // Note: setup runs BEFORE the skybox swap below, preserving the original
        // Awake-time semantics (the blit material is the pre-existing skybox).
        SetupCommandBuffers();

        pastSkyboxMaterial = RenderSettings.skybox;
        RenderSettings.skybox = skyboxMaterial;
        ARSubsystemManager.cameraFrameReceived += OnCameraFrameReceived;
    }

    private void Update()
    {
        // The skybox shader sphere-maps in view space, so it needs the camera's
        // current world-to-camera matrix every frame.
        skyboxMaterial.SetMatrix(WORLD_TO_CAMERA_MATRIX_PROP_ID, aRCamera.worldToCameraMatrix);
    }

    private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        // Refresh the target texture from the camera background on every AR frame.
        BlitToRenderTexture(renderTexture, aRCameraBackground);
    }

    private void OnDisable()
    {
        RenderSettings.skybox = pastSkyboxMaterial;
        ARSubsystemManager.cameraFrameReceived -= OnCameraFrameReceived;
        RemoveCommandBuffers();
        isCapturing = false;
    }

    // Creates and installs the blit/release command buffers on the AR camera.
    private void SetupCommandBuffers()
    {
        // BUG FIX: the original dereferenced renderTexture with no validation,
        // producing a bare NullReferenceException when a reference was unassigned.
        if (aRCamera == null || renderTexture == null)
        {
            Debug.LogError("ARDeviceScreenReflections requires both an AR camera and a RenderTexture to be assigned.", this);
            enabled = false;
            return;
        }

        // Clean up any previous command buffers and event hooks.
        RemoveCommandBuffers();

        // Blit into a temporary working RT, then copy into the target texture,
        // just before opaque rendering.
        // NOTE(review): using RenderSettings.skybox as the blit material looks
        // suspect — the upstream ARCameraLighting repo blits the camera texture
        // here instead (see the commented-out BlitCameraTexture call in the
        // original paste). TODO confirm which material is intended.
        m_blitCommandBuffer = new CommandBuffer();
        m_blitCommandBuffer.name = "Get ARBackground";
        m_blitCommandBuffer.GetTemporaryRT(WORKING_RENDER_TEXTURE_ID, renderTexture.width, renderTexture.height, 0, FilterMode.Bilinear);
        m_blitCommandBuffer.Blit(null, WORKING_RENDER_TEXTURE_ID, RenderSettings.skybox);
        m_blitCommandBuffer.Blit(WORKING_RENDER_TEXTURE_ID, renderTexture);
        aRCamera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, m_blitCommandBuffer);

        // Release the temporary RT once the skybox has rendered.
        m_releaseCommandBuffer = new CommandBuffer();
        m_releaseCommandBuffer.name = "Release ARBackground";
        m_releaseCommandBuffer.ReleaseTemporaryRT(WORKING_RENDER_TEXTURE_ID);
        aRCamera.AddCommandBuffer(CameraEvent.AfterSkybox, m_releaseCommandBuffer);

        isCapturing = true;
    }

    // Detaches both command buffers from the AR camera, if installed.
    private void RemoveCommandBuffers()
    {
        if (m_blitCommandBuffer != null && aRCamera != null)
        {
            aRCamera.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, m_blitCommandBuffer);
            aRCamera.RemoveCommandBuffer(CameraEvent.AfterSkybox, m_releaseCommandBuffer);
        }
    }

    /// <summary>
    /// Copies the current AR camera image into <paramref name="renderTexture"/>.
    /// </summary>
    /// <param name="renderTexture">Destination texture. Must not be null.</param>
    /// <param name="cameraBackground">Provides the camera image material. Must not be null.</param>
    /// <exception cref="ArgumentNullException">If either argument is null.</exception>
    public static void BlitToRenderTexture(RenderTexture renderTexture, ARCameraBackground cameraBackground)
    {
        if (renderTexture == null)
            throw new ArgumentNullException(nameof(renderTexture));
        if (cameraBackground == null)
            throw new ArgumentNullException(nameof(cameraBackground));

        // Copy the camera background to the RenderTexture.
        Graphics.Blit(null, renderTexture, cameraBackground.material);
    }

    private static readonly int WORLD_TO_CAMERA_MATRIX_PROP_ID = Shader.PropertyToID("_WorldToCameraMatrix");
    private static readonly int WORKING_RENDER_TEXTURE_ID = Shader.PropertyToID("_ARCameraRenderTexture");
}
Shader for repo (GitHub - johnsietsma/ARCameraLighting: A demonstration of using the AR camera background for lighting a scene.)
Shader "AR/ARSkybox"
{
    Properties
    {
        // Camera image written each frame by the companion C# script.
        _LightingTex("Render Texture", 2D) = "white" {}
    }

    CGINCLUDE
    #include "UnityCG.cginc"

    struct appdata
    {
        float4 position : POSITION;
        float3 normal : NORMAL;
        float3 texcoord : TEXCOORD0; // unused by this shader; kept for vertex-layout compatibility
    };

    struct v2f
    {
        float4 position : SV_POSITION;
        float2 texcoord : TEXCOORD0;
    };

    // This relies on a RenderTexture of this name being created in ARCoreCameraRenderTexture.cs.
    sampler2D _LightingTex;

    // Set every frame from script so the sphere mapping follows the device camera.
    float4x4 _WorldToCameraMatrix;

    // Sphere mapping. Find the reflection vector and transform it into UV coords.
    // Heavily inspired by https://www.clicktorelease.com/blog/creating-spherical-environment-mapping-shader/
    float2 SphereMapUVCoords(float3 viewDir, float3 normal)
    {
        float3 reflection = reflect(viewDir, normal);
        float m = 2. * sqrt(
            pow(reflection.x, 2.) +
            pow(reflection.y, 2.) +
            pow(reflection.z + 1., 2.)
        );
        return reflection.xy / m + .5;
    }

    v2f vert(appdata v)
    {
        // Create a sphere map with a texture whose center is at the viewDir/sphere intersection.
        // The texture is wrapped around the sphere so that the corners meet directly behind the camera.
        // When rendering a skybox, the view direction is altered for each face. Grab the world-space
        // view direction to each vert, then apply the camera's world-to-camera transform to bring it
        // back to view space.
        float3 viewDir = -normalize(WorldSpaceViewDir(v.position));
        // BUG FIX: the original assigned a float4 to a float3, relying on implicit
        // truncation (a shader-compiler warning); take .xyz explicitly.
        // w = 0 so only the rotational part of the matrix applies.
        viewDir = mul(_WorldToCameraMatrix, float4(viewDir, 0)).xyz;

        v2f o;
        o.position = UnityObjectToClipPos(v.position);
        o.texcoord = SphereMapUVCoords(viewDir, v.normal);
        return o;
    }

    // BUG FIX: use SV_Target rather than the legacy COLOR output semantic.
    fixed4 frag(v2f i) : SV_Target
    {
        return tex2D(_LightingTex, i.texcoord);
    }
    ENDCG

    SubShader
    {
        Tags { "RenderType" = "Background" "Queue" = "Background" }
        Pass
        {
            ZWrite Off
            Cull Off
            // Legacy fixed-function fog directive; ignored by modern Unity but harmless.
            Fog { Mode Off }

            CGPROGRAM
            #pragma fragmentoption ARB_precision_hint_fastest
            #pragma vertex vert
            #pragma fragment frag
            ENDCG
        }
    }
}