How to get camera texture in ARFoundation?

Check this out, ma peeps! (I hope this helps; this was a challenge for us!)
Included below are a script and a shader, adapted from the blit project above and the ARCameraLighting project.
Using Unity 2018.3.8f1.

C# code from @tdmowrer and from the repo (GitHub - johnsietsma/ARCameraLighting: A demonstration of using the AR camera background for lighting a scene.)

using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.XR.ARFoundation;

[RequireComponent(typeof(ReflectionProbe))]
public class ARDeviceScreenReflections : MonoBehaviour
{
    [SerializeField]
    private Camera aRCamera = null;
    [SerializeField]
    [Tooltip("The camera background which controls the camera image.")]
    private ARCameraBackground aRCameraBackground = null;
    [SerializeField]
    private Material skyboxMaterial = null;
    [SerializeField]
    [Tooltip("The RenderTexture to blit the camera image to.")]
    private RenderTexture renderTexture = null;

    public bool IsCapturing { get { return isCapturing; } }

    private Material pastSkyboxMaterial = null;
    private CommandBuffer m_blitCommandBuffer = null;
    private CommandBuffer m_releaseCommandBuffer = null;
    private bool isCapturing;

    private void Awake()
    {
        // Get the width and height of the target RenderTexture.
        int renderTextureWidth = renderTexture.width;
        int renderTextureHeight = renderTexture.height;

        // Clean up any previous command buffer and events hooks
        if (m_blitCommandBuffer != null)
        {
            aRCamera.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, m_blitCommandBuffer);
            aRCamera.RemoveCommandBuffer(CameraEvent.AfterSkybox, m_releaseCommandBuffer);
        }

        // Create the blit command buffer
        m_blitCommandBuffer = new CommandBuffer();
        m_blitCommandBuffer.GetTemporaryRT(WORKING_RENDER_TEXTURE_ID, renderTextureWidth, renderTextureHeight, 0, FilterMode.Bilinear);
        m_blitCommandBuffer.name = "Get ARBackground";

        // Blit into the temporary working texture using the skybox material.
        m_blitCommandBuffer.Blit(null, WORKING_RENDER_TEXTURE_ID, RenderSettings.skybox);

        // Copy over to the target texture.
        m_blitCommandBuffer.Blit(WORKING_RENDER_TEXTURE_ID, renderTexture);

        // Run the command buffer just before opaque rendering
        aRCamera.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, m_blitCommandBuffer);

        // Cleanup the temp render textures
        m_releaseCommandBuffer = new CommandBuffer();
        m_releaseCommandBuffer.name = "Release ARBackground";
        m_releaseCommandBuffer.ReleaseTemporaryRT(WORKING_RENDER_TEXTURE_ID);
        aRCamera.AddCommandBuffer(CameraEvent.AfterSkybox, m_releaseCommandBuffer);

        isCapturing = true;
    }

    private void OnEnable()
    {
        pastSkyboxMaterial = RenderSettings.skybox;
        RenderSettings.skybox = skyboxMaterial;
        ARSubsystemManager.cameraFrameReceived += OnCameraFrameReceived;
    }

    private void Update()
    {
        skyboxMaterial.SetMatrix(WORLD_TO_CAMERA_MATRIX_PROP_ID, aRCamera.worldToCameraMatrix);
    }

    private void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        BlitToRenderTexture(renderTexture, aRCameraBackground);
    }

    private void OnDisable()
    {
        RenderSettings.skybox = pastSkyboxMaterial;
        ARSubsystemManager.cameraFrameReceived -= OnCameraFrameReceived;
        // Clean up any previous command buffer and events hooks
        if (m_blitCommandBuffer != null && aRCamera != null)
        {
            aRCamera.RemoveCommandBuffer(CameraEvent.BeforeForwardOpaque, m_blitCommandBuffer);
            aRCamera.RemoveCommandBuffer(CameraEvent.AfterSkybox, m_releaseCommandBuffer);
        }
        isCapturing = false;
    }

    public static void BlitToRenderTexture(RenderTexture renderTexture, ARCameraBackground cameraBackground)
    {
        if (renderTexture == null)
            throw new ArgumentNullException("renderTexture");

        if (cameraBackground == null)
            throw new ArgumentNullException("cameraBackground");

        // Copy the camera background to a RenderTexture
        Graphics.Blit(null, renderTexture, cameraBackground.material);
    }

    private static readonly int WORLD_TO_CAMERA_MATRIX_PROP_ID = Shader.PropertyToID("_WorldToCameraMatrix");
    private static readonly int WORKING_RENDER_TEXTURE_ID = Shader.PropertyToID("_ARCameraRenderTexture");
}

Shader from the repo (GitHub - johnsietsma/ARCameraLighting: A demonstration of using the AR camera background for lighting a scene.)

Shader "AR/ARSkybox"
{
    Properties
    {
        _LightingTex("Render Texture", 2D) = "white" {}
    }

    CGINCLUDE

    #include "UnityCG.cginc"

    struct appdata
    {
        float4 position : POSITION;
        float3 normal : NORMAL;
        float3 texcoord : TEXCOORD0;
    };

    struct v2f
    {
        float4 position : SV_POSITION;
        float2 texcoord : TEXCOORD0;
    };

    // This relies on a RenderTexture of this name being created in ARCoreCameraRenderTexture.cs.
    sampler2D _LightingTex;
    float4x4 _WorldToCameraMatrix;

    float2 SphereMapUVCoords( float3 viewDir, float3 normal )
    {
        // Sphere mapping. Find reflection and transform into UV coords.
        // Heavily inspired by https://www.clicktorelease.com/blog/creating-spherical-environment-mapping-shader/
        float3 reflection = reflect(viewDir, normal);
        float m = 2. * sqrt(
            pow(reflection.x, 2.) +
            pow(reflection.y, 2.) +
            pow(reflection.z + 1., 2.)
        );
        return reflection.xy / m + .5;
    }

    v2f vert(appdata v)
    {
        // Create a sphere map with a texture whose center is at the viewDir/sphere intersection.
        // The texture is wrapped around the sphere so that the corners meet directly behind the camera.
        // To do this we could operate in static viewDir (0,0,1) space. We always want to look at the center on the texture.
        // When we move the phone, there is no need to change the view direction.
        // When rendering a skybox, the view direction is altered for each face. Grab the world space view direction to each vert
        //  then reverse the camera's view direction, bringing it back to view space.
        float3 viewDir = -normalize(WorldSpaceViewDir(v.position));
        // The matrix multiply yields a float4; take .xyz to avoid an implicit truncation.
        viewDir = mul(_WorldToCameraMatrix, float4(viewDir, 0)).xyz;

        v2f o;
        o.position = UnityObjectToClipPos(v.position);
        o.texcoord = SphereMapUVCoords(viewDir, v.normal);

        return o;
    }

    fixed4 frag(v2f i) : COLOR
    {
        return tex2D(_LightingTex, i.texcoord);
    }

    ENDCG

    SubShader
    {
        Tags{ "RenderType" = "Background" "Queue" = "Background" }
            Pass
        {
            ZWrite Off
            Cull Off
            Fog{ Mode Off }
            CGPROGRAM
            #pragma fragmentoption ARB_precision_hint_fastest
            #pragma vertex vert
            #pragma fragment frag
            ENDCG
        }
    }
}

Thx so much for sharing! :smile: I'm having trouble getting it to work though… :frowning: Does this require anything else from the ARCameraLighting project? I've created and assigned the necessary components onto ARDeviceScreenReflections on a Reflection Probe. I allowed realtime reflections in settings… I'm stumped…

The only way I've gotten it to work is to set the camera to target the CameraRenderTexture and have that assigned to a Raw Image… not ideal. Could you possibly email me a basic scene file with it set up? christougher@hotmail.com. I'd be happy to detail the instructions here for anyone else. Again, thanks so much for sharing!

Okay, I'm not sure, but for us this worked only after fiddling with the reflection probes:
- Reflection probes are enabled in the quality settings.
- A large enough reflection probe is in the scene, set to Realtime, and refreshing every frame.
- The meshes use the Blend Probes option.
- The probe captures at the same resolution as your render texture (width and height both).
We tested on Unity 2018.3.8f1 on an iPhone 8.

Besides that, check that the environment reflections resolution matches the probe and render texture resolution, AND that the environment reflections source is set to Skybox.
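For reference, the probe settings above can also be applied from a script. A minimal sketch, assuming you wire up the probe and render texture references yourself:

using UnityEngine;
using UnityEngine.Rendering;

// Hypothetical helper that applies the probe settings described above.
public class RealtimeProbeSetup : MonoBehaviour
{
    [SerializeField] private ReflectionProbe probe = null;
    [SerializeField] private RenderTexture renderTexture = null;

    private void Awake()
    {
        probe.mode = ReflectionProbeMode.Realtime;
        probe.refreshMode = ReflectionProbeRefreshMode.EveryFrame;
        probe.timeSlicingMode = ReflectionProbeTimeSlicingMode.NoTimeSlicing;
        // Capture at the same resolution as the render texture (must be a power of two).
        probe.resolution = renderTexture.width;
    }
}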


Thanks so much, I'll check it out!

Working!!! Got it going on Android on my Pixel 2. I couldn't figure out what the problem was with my scene; everything was still showing up black. I finally saw that your render texture was set to No Depth Buffer. After making that change, everything worked as it should.
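In case it saves someone else time: if you create the RenderTexture from script instead of as an asset, a depth value of 0 in the constructor corresponds to the No Depth Buffer option in the inspector (the 1024s here are placeholder sizes):

// 0 depth bits = the "No Depth Buffer" inspector option.
var renderTexture = new RenderTexture(1024, 1024, 0, RenderTextureFormat.ARGB32);
renderTexture.Create();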


Quick question on this: are CameraConfigurations compatible with using XRCameraSubsystem.TryGetLatestImage to get the raw image on the CPU? For some reason, setting the CameraConfiguration prevents us from getting an image where we otherwise have no problem.

Hi tdmowrer,

I am new to ARFoundation. My requirement is to position 3D content on the pose of a detected QR code, both vertical and horizontal. In ARKit I am using "ARTextureHandles handles = arSession.GetARVideoTextureHandles();" to get values. Please advise on the syntax to use in ARFoundation for this scenario.

Is blitting the camera texture working in 2019 with AR Foundation? Has it worked for anyone else? I seem to get errors on iOS.

tdmowrer, is there still a link to "How to get camera texture in ARFoundation?"?
I'm trying to mask the ARBackground camera image and thought this might help.

We cleaned up some old branches recently. This post might help you, though.


Thanks!
So I am successfully grabbing the ARCameraBackground with Graphics.Blit, but how do I do this without actually rendering the source ARCameraBackground?
I.e., I only want to see the RenderTexture I am targeting.

Is it necessary to modify the ARCameraBackground component or is there another way?

Funnily enough, my issue is the exact opposite: I actually want the Unity objects in my picture, but they're not showing up.

using UnityEngine;
using UnityEngine.XR.ARFoundation;
using System;
using Infrastructure.CoroutineRunner;

namespace Infrastructure.CCSystem
{
    public class CameraCaptureSystem : ICameraCaptureSystem
    {
        public static Texture2D m_Texture;

        private RenderTexture renderTexture;
        private Texture2D lastCameraTexture;

        private ARCameraBackground aRCameraBackground;
        private ICoroutineRunner coroutineRunner;

        public CameraCaptureSystem(
            ARCameraBackground aRCameraBackground,
            RenderTexture renderTexture,
            ICoroutineRunner coroutineRunner)
        {
            this.aRCameraBackground = aRCameraBackground;
            this.renderTexture = renderTexture;
            this.coroutineRunner = coroutineRunner;

        }

        public void CapturePhoto()
        {
            Graphics.Blit(null, renderTexture, aRCameraBackground.material);

            var activeRenderTexture = RenderTexture.active;
            RenderTexture.active = renderTexture;
            if (lastCameraTexture == null)
                lastCameraTexture = new Texture2D(renderTexture.width, renderTexture.height, TextureFormat.RGB24, true);
            lastCameraTexture.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);
            lastCameraTexture.Apply();
            RenderTexture.active = activeRenderTexture;

            m_Texture = lastCameraTexture;
        }
    }
}

I'm on Unity 2019.1.6f1
AR Foundation 2.2.0
ARCore 2.1.0

How can I get objects I spawn in AR to show up in a picture?


I am getting a black screen instead. Here is what I am trying:

Texture2D cameraTexture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
RenderTexture rt = new RenderTexture((int)width, (int)height, 24, RenderTextureFormat.ARGB32);
RenderTexture.active = rt;

arCamera.targetTexture = rt;
arCamera.Render();
cameraTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0);
cameraTexture.Apply();

RenderTexture.active = null;
Destroy(rt);
File.WriteAllBytes(screenShotPath, cameraTexture.EncodeToPNG());

Seems like it should work. The docs suggest using Graphics.Blit to achieve a similar result. Also note that ReadPixels is very slow (~20 ms) and should be avoided if possible. There's a separate API for accessing the camera image on the CPU.
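Also, if you don't need the pixels the same frame, AsyncGPUReadback can avoid the ReadPixels stall (check SystemInfo.supportsAsyncGPUReadback first, as not every graphics API supports it). A rough sketch, assuming renderTexture already holds the blitted camera image:

using Unity.Collections;
using UnityEngine;
using UnityEngine.Rendering;

public class AsyncReadbackExample : MonoBehaviour
{
    [SerializeField] private RenderTexture renderTexture = null;

    public void RequestPixels()
    {
        // The callback fires a few frames later, without blocking the main thread.
        AsyncGPUReadback.Request(renderTexture, 0, TextureFormat.RGBA32, OnReadback);
    }

    private void OnReadback(AsyncGPUReadbackRequest request)
    {
        if (request.hasError)
            return;

        NativeArray<byte> data = request.GetData<byte>();
        // ... copy into a Texture2D or encode as needed ...
    }
}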

Hi tdmowrer,
I don't understand: which is the most efficient method to get the camera image on iOS? The one you wrote here (which uses the ReadPixels function that you say should be avoided), or the "CPU" method you linked in your last post?
I just want to get the iPhone camera feed (nothing more than what the camera sees) and encode it to MP4, while still seeing the AR content of my application on my iPhone.
Thanks for your explanation.

Hi @tdmowrer, I tried using Graphics.Blit and there is no AR object in the image that gets saved.

It sounds like you need the camera image on the CPU, so you should use the CPU image API I linked.
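Roughly, that CPU path looks like this in ARFoundation 2.x; treat it as a sketch against the 2.x docs, since the type names changed in later versions (XRCameraImage became XRCpuImage in 4.x) and it requires "Allow unsafe code":

using System;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;

public class CpuImageExample : MonoBehaviour
{
    [SerializeField] private ARCameraManager cameraManager = null;

    public unsafe Texture2D GrabLatestImage()
    {
        // Acquire the image on the CPU; no GPU readback or blit involved.
        if (!cameraManager.TryGetLatestImage(out XRCameraImage image))
            return null;

        // XRCameraImage wraps a native resource and must be disposed.
        using (image)
        {
            var conversionParams = new XRCameraImageConversionParams
            {
                inputRect = new RectInt(0, 0, image.width, image.height),
                outputDimensions = new Vector2Int(image.width, image.height),
                outputFormat = TextureFormat.RGBA32,
                transformation = CameraImageTransformation.MirrorY
            };

            var texture = new Texture2D(image.width, image.height, TextureFormat.RGBA32, false);
            var buffer = texture.GetRawTextureData<byte>();
            image.Convert(conversionParams, new IntPtr(buffer.GetUnsafePtr()), buffer.Length);
            texture.Apply();
            return texture;
        }
    }
}

Note this returns the raw camera image only; like the Blit approach, it will not contain any virtual content.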

@tdmowrer, I have tried using that API and also Graphics.Blit, but the result is the same: I am unable to get the AR object in the image that gets saved.

The methods discussed here only provide the camera texture; they do not include any virtual content in your scene. What is your use case? Are you just trying to take a screenshot?
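If so, something like ScreenCapture may be enough, since it grabs the composited frame (camera feed plus virtual content). A minimal sketch, with a placeholder path:

using System.Collections;
using System.IO;
using UnityEngine;

public class ScreenshotExample : MonoBehaviour
{
    public void TakeScreenshot()
    {
        StartCoroutine(Capture(Path.Combine(Application.persistentDataPath, "shot.png")));
    }

    private IEnumerator Capture(string path)
    {
        // Wait until the camera background and all virtual content have rendered.
        yield return new WaitForEndOfFrame();

        Texture2D shot = ScreenCapture.CaptureScreenshotAsTexture();
        File.WriteAllBytes(path, shot.EncodeToPNG());
        Destroy(shot);
    }
}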

Yeah, I am trying to take a screenshot from the AR camera and apply an overlay on the final image. So I am assigning a render texture to the AR camera and then using that render texture in another camera to apply the overlay on the screenshot. It works fine on Android, but on iOS the screen turns black as soon as a render texture is assigned to the AR camera.
Here is what I am trying; it works fine on Android but not on iOS:

Texture2D cameraTexture = new Texture2D((int)width, (int)height, TextureFormat.RGB24, false);
RenderTexture rt = new RenderTexture((int)width, (int)height, 24, RenderTextureFormat.ARGB32);
RenderTexture.active = rt;
arCamera.targetTexture = rt;
arCamera.Render();
arCamera.targetTexture = null;

overlayCamera.gameObject.SetActive(true);
overlayCamera.targetTexture = rt;
overlayCamera.Render();
overlayCamera.targetTexture = null;
overlayCamera.gameObject.SetActive(false);

cameraTexture.ReadPixels(new Rect(0, 0, rt.width, rt.height), 0, 0);
cameraTexture.Apply();
RenderTexture.active = null;
Destroy(rt);
File.WriteAllBytes(screenShotPath, cameraTexture.EncodeToPNG());
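A possible workaround, along the lines of the screenshot approach suggested earlier: render the overlay to the screen and read the backbuffer at end of frame, so no targetTexture is ever assigned to the AR camera. Untested on iOS here, so treat it as a sketch:

using System.Collections;
using System.IO;
using UnityEngine;

public class EndOfFrameCapture : MonoBehaviour
{
    public IEnumerator Capture(string screenShotPath)
    {
        // By end of frame the AR background, virtual content, and any
        // screen-space overlay have all been composited to the backbuffer.
        yield return new WaitForEndOfFrame();

        var shot = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
        shot.ReadPixels(new Rect(0, 0, Screen.width, Screen.height), 0, 0);
        shot.Apply();

        File.WriteAllBytes(screenShotPath, shot.EncodeToPNG());
        Destroy(shot);
    }
}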