Some have asked for an example of capturing a holographic photo that blends in with the physical environment. You can do this by using the PhotoCapture API along with the projection and world-to-camera matrices that are included with the captured image data.
Attached are a C# script and a shader. The C# script captures an image using the web camera on the HoloLens whenever you perform the air-tap gesture. The script then uploads the captured image to the GPU so that the shader can access the image data. The shader calculates which part of the image should be shown based on where the photo was taken.
HoloLensSnapshotTest.cs
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.VR.WSA.WebCam;
using UnityEngine.VR.WSA.Input;
// Captures a photo with the HoloLens web camera on an air-tap gesture and
// projects it onto a quad placed where the photo was physically taken, using
// the photo's world-to-camera and projection matrices so the image blends
// with the real environment.
public class HoloLensSnapshotTest : MonoBehaviour
{
    GestureRecognizer m_GestureRecognizer;
    GameObject m_Canvas = null;            // quad the captured photo is projected onto
    Renderer m_CanvasRenderer = null;
    PhotoCapture m_PhotoCaptureObj;
    CameraParameters m_CameraParameters;
    bool m_CapturingPhoto = false;         // guards against overlapping capture requests
    Texture2D m_Texture = null;            // reused GPU texture; one allocation for all photos

    void Start()
    {
        Initialize();
    }

    void OnDestroy()
    {
        // Release camera and gesture resources. Without this, photo mode stays
        // active and keeps holding the web camera after the component dies.
        if (m_GestureRecognizer != null)
        {
            m_GestureRecognizer.TappedEvent -= OnTappedEvent;
            m_GestureRecognizer.StopCapturingGestures();
            m_GestureRecognizer.Dispose();
            m_GestureRecognizer = null;
        }
        if (m_PhotoCaptureObj != null)
        {
            m_PhotoCaptureObj.StopPhotoModeAsync(OnStoppedPhotoMode);
        }
    }

    void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
    {
        m_PhotoCaptureObj.Dispose();
        m_PhotoCaptureObj = null;
    }

    // Hooks the air-tap gesture up to taking a photo.
    void SetupGestureRecognizer()
    {
        m_GestureRecognizer = new GestureRecognizer();
        m_GestureRecognizer.SetRecognizableGestures(GestureSettings.Tap);
        m_GestureRecognizer.TappedEvent += OnTappedEvent;
        m_GestureRecognizer.StartCapturingGestures();
        m_CapturingPhoto = false;
    }

    // Chooses a capture resolution, allocates the destination texture, and
    // asynchronously creates the PhotoCapture object.
    void Initialize()
    {
        Debug.Log("Initializing...");
        List<Resolution> resolutions = new List<Resolution>(PhotoCapture.SupportedResolutions);
        if (resolutions.Count == 0)
        {
            Debug.LogError("No web camera resolutions found; cannot capture photos.");
            return;
        }
        // NOTE(review): SupportedResolutions has no documented ordering, so
        // index 0 is an arbitrary choice — sort by width*height if a specific
        // resolution is required.
        Resolution selectedResolution = resolutions[0];
        m_CameraParameters = new CameraParameters(WebCamMode.PhotoMode);
        m_CameraParameters.cameraResolutionWidth = selectedResolution.width;
        m_CameraParameters.cameraResolutionHeight = selectedResolution.height;
        // Hide holograms in the capture so only the physical scene is photographed.
        m_CameraParameters.hologramOpacity = 0.0f;
        m_CameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
        m_Texture = new Texture2D(selectedResolution.width, selectedResolution.height, TextureFormat.BGRA32, false);
        PhotoCapture.CreateAsync(false, OnCreatedPhotoCaptureObject);
    }

    void OnCreatedPhotoCaptureObject(PhotoCapture captureObject)
    {
        if (captureObject == null)
        {
            Debug.LogError("Failed to create the PhotoCapture object.");
            return;
        }
        m_PhotoCaptureObj = captureObject;
        m_PhotoCaptureObj.StartPhotoModeAsync(m_CameraParameters, true, OnStartPhotoMode);
    }

    void OnStartPhotoMode(PhotoCapture.PhotoCaptureResult result)
    {
        if (!result.success)
        {
            Debug.LogError("Failed to start photo mode.");
            return;
        }
        SetupGestureRecognizer();
        Debug.Log("Ready!");
        Debug.Log("Air Tap to take a picture.");
    }

    void OnTappedEvent(InteractionSourceKind source, int tapCount, Ray headRay)
    {
        // Ignore taps while a capture is already in flight.
        if (m_CapturingPhoto)
        {
            return;
        }
        m_CapturingPhoto = true;
        Debug.Log("Taking picture...");
        m_PhotoCaptureObj.TakePhotoAsync(OnPhotoCaptured);
    }

    // Uploads the captured photo to the canvas material and positions the quad
    // where the photo was taken so the image overlays the physical world.
    void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
    {
        if (!result.success)
        {
            Debug.LogError("Photo capture failed.");
            m_CapturingPhoto = false;   // allow another attempt
            return;
        }

        if (m_Canvas == null)
        {
            m_Canvas = GameObject.CreatePrimitive(PrimitiveType.Quad);
            m_Canvas.name = "PhotoCaptureCanvas";
            m_CanvasRenderer = m_Canvas.GetComponent<Renderer>();
            Shader shader = Shader.Find("AR/HolographicImageBlend");
            if (shader == null)
            {
                Debug.LogError("Could not find shader AR/HolographicImageBlend; is it included in the build?");
                m_CapturingPhoto = false;
                return;
            }
            m_CanvasRenderer.material = new Material(shader);
        }

        // The matrices can be unavailable (e.g. the device lost tracking when
        // the photo was taken); bail out rather than projecting with garbage.
        Matrix4x4 cameraToWorldMatrix;
        Matrix4x4 projectionMatrix;
        if (!photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix) ||
            !photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix))
        {
            Debug.LogError("Could not retrieve the camera matrices for this photo.");
            m_CapturingPhoto = false;
            return;
        }
        Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

        photoCaptureFrame.UploadImageDataToTexture(m_Texture);
        m_Texture.wrapMode = TextureWrapMode.Clamp;

        m_CanvasRenderer.sharedMaterial.SetTexture("_MainTex", m_Texture);
        m_CanvasRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);
        m_CanvasRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
        m_CanvasRenderer.sharedMaterial.SetFloat("_VignetteScale", 1.0f);

        // Position the canvas object slightly in front of the real-world web
        // camera: translation column minus the camera's back axis (column 2).
        Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);
        // Rotate the canvas object so that it faces the user.
        Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
        m_Canvas.transform.position = position;
        m_Canvas.transform.rotation = rotation;

        Debug.Log("Took picture!");
        m_CapturingPhoto = false;
    }
}
HolographicImageBlendShader.shader
// Reprojects the most recently captured photo onto this object's surface using
// the physical web camera's world-to-camera and projection matrices, so the
// photo lines up with the real environment it was taken from. Fragments that
// fall outside the photo's frustum are output as black, which is transparent
// on the HoloLens additive display.
Shader "AR/HolographicImageBlend"
{
Properties
{
// Photo captured by the PhotoCapture API (uploaded from the C# script).
_MainTex ("Texture", 2D) = "white" {}
// Strength of the circular fade toward the image edges; 0 disables it.
_VignetteScale ("Vignette Scale", RANGE(0,2)) = 0
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
};
struct v2f
{
// Standard clip-space position for rasterization (through Unity's MVP).
float4 vertexPositionInProjectionSpace : SV_POSITION;
// NOTE(review): declared but never written or read in this shader —
// candidate for removal after confirming no shader variant uses it.
float2 uv : TEXCOORD0;
// Vertex position in the PHYSICAL camera's clip space (set via the
// matrices supplied by the C# script); the perspective divide is
// deferred to the fragment shader for correct interpolation.
float4 vertexInProjectionSpace : TEXCOORD1;
};
sampler2D _MainTex;
// World-to-camera matrix of the web camera at the moment of capture.
float4x4 _WorldToCameraMatrix;
// Projection matrix of the web camera at the moment of capture.
float4x4 _CameraProjectionMatrix;
float _VignetteScale;
v2f vert (appdata v)
{
v2f o;
o.vertexPositionInProjectionSpace = mul(UNITY_MATRIX_MVP, v.vertex);
// Calculate the vertex position in world space.
float4 vertexPositionInWorldSpace = mul(unity_ObjectToWorld, float4(v.vertex.xyz,1));
// Now take the world space vertex position and transform it so that
// it is relative to the physical web camera on the HoloLens.
float4 vertexPositionInCameraSpace = mul(_WorldToCameraMatrix, float4(vertexPositionInWorldSpace.xyz,1));
// Convert our camera relative vertex into clip space.
o.vertexInProjectionSpace = mul(_CameraProjectionMatrix, float4(vertexPositionInCameraSpace.xyz, 1.0));
return o;
}
fixed4 frag (v2f i) : SV_Target
{
// Transform the vertex into normalized coordinate space. Basically
// we want to map where our vertex should be on the screen into the -1 to 1 range
// for both the x and y axes (perspective divide by w = NDC).
float2 signedUV = i.vertexInProjectionSpace.xy / i.vertexInProjectionSpace.w;
// The HoloLens uses an additive display so the color black will
// be transparent. If the texture is smaller than the canvas, color the extra
// area on the canvas black so it will be transparent on the HoloLens.
if(abs(signedUV.x) > 1.0 || abs(signedUV.y) > 1.0)
{
return fixed4( 0.0, 0.0, 0.0, 0.0);
}
// Currently our signedUV's x and y coordinates will fall between -1 and 1.
// We need to map this range from 0 to 1 so that we can sample our texture.
float2 uv = signedUV * 0.5 + float2(0.5, 0.5);
fixed4 finalColor = tex2D(_MainTex, uv);
// Finally add a circular vignette effect starting from the center
// of the image: darken by distance from center scaled by _VignetteScale.
finalColor *= 1.0-(length(signedUV) * _VignetteScale);
return finalColor;
}
ENDCG
}
}
}



