Has anyone tried this yet?
@ina here ya go, have fun!
This guy has the best ARKit/Unity example set so far
https://github.com/realityenhanced/ARKitExperiments
Be sure to read the readme. There is an extra step with OpenCV.
We are using ARKit (and ARCore) with OpenCV. The biggest issue is extracting the image buffer from the WebCamTexture to pass into a CV Mat without incurring too much of a cost (since whatever you then do with OpenCV adds further compute cost on top).
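Roughly, the path we're on looks like the sketch below (a minimal sketch assuming OpenCV for Unity; Utils.webCamTextureToMat and the reusable Color32[] buffer are the point, the class and field names are just illustrative):

using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using UnityEngine;

public class WebCamToMatSketch : MonoBehaviour
{
    WebCamTexture webCamTexture;
    Mat rgbaMat;
    Color32[] colors; // reused every frame to avoid per-frame allocations

    void Start()
    {
        webCamTexture = new WebCamTexture();
        webCamTexture.Play();
    }

    void Update()
    {
        if (!webCamTexture.didUpdateThisFrame)
            return;

        // Allocate once the camera reports its real resolution (it reports 16x16 before init).
        if (rgbaMat == null && webCamTexture.width > 16)
        {
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            colors = new Color32[webCamTexture.width * webCamTexture.height];
        }
        if (rgbaMat == null)
            return;

        // This is the expensive hop: GPU texture -> managed Color32[] -> native Mat.
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // ...run the actual OpenCV work on rgbaMat here...
    }

    void OnDestroy()
    {
        if (webCamTexture != null) webCamTexture.Stop();
        if (rgbaMat != null) rgbaMat.Dispose();
    }
}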
Are you guys talking about OpenCV or OpenCV for Unity? OpenCV for Unity is a plugin on the Asset Store.
The GitHub repo linked above seems to be a custom wrapper. OpenCV for Unity is good for prototyping but has a lot of performance issues :-\
I’ve got the same challenge. I’m using the ARTextureHandles to get the IntPtr to the camera feed textures, but my OpenCV Mats render as black. Any ideas welcome! The code is:
using System;
using UnityEngine;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using UnityEngine.UI;
using UnityEngine.XR.iOS;

public class ProcessVideo : MonoBehaviour
{
    Texture2D texture;
    Texture2D videoTextureY;
    RawImage rawImage;
    Resolution currentResolution;
    bool faceDetectionRunning;

    public void StartFaceDetection()
    {
        faceDetectionRunning = true;
    }

    public void StopFaceDetection()
    {
        faceDetectionRunning = false;
    }

    void Start()
    {
        currentResolution = Screen.currentResolution;
        rawImage = gameObject.GetComponent<RawImage>();
        texture = new Texture2D(currentResolution.width, currentResolution.height, TextureFormat.RGBA32, false);
        rawImage.texture = texture;
    }

    void Update()
    {
        if (faceDetectionRunning)
        {
#if !UNITY_EDITOR && UNITY_IOS
            ARTextureHandles handles = UnityARSessionNativeInterface.GetARSessionNativeInterface().GetARVideoTextureHandles();
            if (handles.TextureY != IntPtr.Zero)
            {
                ProcessARVideoTexture(handles.TextureY);
            }
#endif
        }
    }

    private void ProcessARVideoTexture(IntPtr pointerY)
    {
        // Running the following 4 lines results in the correct video texture being applied to the
        // gameObject's RawImage texture, so we know the pointer is right.
        //videoTextureY = Texture2D.CreateExternalTexture(currentResolution.width, currentResolution.height, TextureFormat.RGBA32, false, false, pointerY);
        //videoTextureY.filterMode = FilterMode.Bilinear;
        //videoTextureY.wrapMode = TextureWrapMode.Repeat;
        //rawImage.texture = videoTextureY;

        Size size = new Size(currentResolution.width, currentResolution.height);

        // PROBLEM: The following lines render a black texture with some static-like lines on it to the
        // gameObject's RawImage texture. I need the rgbaMat to be the TextureY from the camera feed. Please help!
        using (Mat rgbaMat = new Mat(size, CvType.CV_8UC4, new Scalar(0, 0, 0, 255)))
        {
            using (Mat singleChannelMat = new Mat(size, CvType.CV_8UC1))
            {
                Utils.copyToMat(pointerY, singleChannelMat);
                Imgproc.cvtColor(singleChannelMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);
                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
    }
}
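One thing I'm going to try next (just a sketch, untested): since handles.TextureY is a native GPU texture handle rather than a pointer to CPU pixel memory, my guess is Utils.copyToMat is reinterpreting the texture object as raw bytes, which would explain the black/static output. Routing it through a readable Texture2D first should rule that out. This assumes Utils.texture2DToMat and Core.extractChannel behave as documented, and the texture format choices are guesses; it would replace the body of ProcessARVideoTexture:

// Sketch (untested): wrap the native Y handle in an external texture, blit it to a
// temporary RenderTexture, read it back into a CPU-readable Texture2D, and only then
// hand pixels to OpenCV.
int w = currentResolution.width;
int h = currentResolution.height;

// The Y plane is single-channel, so R8 seems right here (RGBA32 also displayed fine above).
Texture2D externalY = Texture2D.CreateExternalTexture(w, h, TextureFormat.R8, false, false, pointerY);

RenderTexture rt = RenderTexture.GetTemporary(w, h, 0);
Graphics.Blit(externalY, rt);

RenderTexture prev = RenderTexture.active;
RenderTexture.active = rt;
Texture2D readableY = new Texture2D(w, h, TextureFormat.RGBA32, false);
readableY.ReadPixels(new Rect(0, 0, w, h), 0, 0);
readableY.Apply();
RenderTexture.active = prev;
RenderTexture.ReleaseTemporary(rt);

using (Mat rgbaMat = new Mat(h, w, CvType.CV_8UC4))
using (Mat yMat = new Mat(h, w, CvType.CV_8UC1))
using (Mat outMat = new Mat(h, w, CvType.CV_8UC4))
{
    Utils.texture2DToMat(readableY, rgbaMat);   // luminance lands in the R channel after the blit
    Core.extractChannel(rgbaMat, yMat, 0);      // pull out a single-channel Y mat
    Imgproc.cvtColor(yMat, outMat, Imgproc.COLOR_GRAY2RGBA);
    Utils.fastMatToTexture2D(outMat, texture);
}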