Sending webcam image from python to Meta Quest 2

Hello everyone,

I’m working on an application for my Meta Quest 2 where I want to add images from a webcam as a texture to a plane. To achieve this, I have created a Python script that uses OpenCV to capture images from the camera. These images are then base64-encoded and sent to Meta Quest 2 via sockets. However, I’m having a problem: if I run both applications (Python and Unity) on the same machine, Unity can add the images as a texture to the plane without the issues. But when I try to do the same from Meta Quest 2, the blueprint appears black, and the submitted images are not textured in the plane.

If anyone knows how to fix this problem, I would really appreciate your help. Attached are the codes I am using:

Python:

import base64
import pickle
import socket
import cv2
import imutils
import time
import threading
import sys

cam = cv2.VideoCapture(0)

# encode to jpeg format
# encode param image quality 0 to 100. default:95
# if you want to shrink data size, choose low image quality.
encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 90]

# Create a TCP/IP socket
sock_image = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

# Bind the socket to the port
server_data = ('192.168.1.102', 8052)
server_image = ('192.168.1.102', 8053)

print('starting up on {} port {}'.format(*server_data))
sock_data.bind(server_data)

print('starting up on {} port {}'.format(*server_image))
sock_image.bind(server_image)

# Listen for incoming connections
sock_data.listen(2)
sock_image.listen(2)


# Custom Thread Class
class ImageThread(threading.Thread):
    def task_send_image(self):
        cam_img = True
        while True:
            # Wait for a connection
            print('waiting for a connection')
            connection_img, client_address_img = sock_image.accept()
            # t2.start()

            try:
                print('connection from', client_address_img)

                # Receive the data in small chunks and retransmit it
                while True:
                    if cam_img:
                        ret, frame = cam.read()
                        frame = imutils.resize(frame, width=320)
                        frame = cv2.flip(frame, 0)
                        result, image = cv2.imencode('.jpg', frame, encode_param)
                        jpg_as_text = base64.b64encode(image)

                    else:
                        frame = cv2.imread('robotImage.jpg')
                        frame = imutils.resize(frame, width=320)
                        frame = cv2.flip(frame, 0)
                        result, image = cv2.imencode('.jpg', frame, encode_param)
                        jpg_as_text = base64.b64encode(image)

                    # connection_img.sendall(jpg_as_text)
                    connection_img.sendto(jpg_as_text, client_address_img)
                    time.sleep(0.2)

            except KeyboardInterrupt:
                connection_img.close()
            finally:
                # Clean up the connection
                connection_img.close()
            raise Exception('Something bad happened')

    def run(self):
        self.task_send_image()

    def join(self):
        threading.Thread.join(self)
# Driver function
def main():
    image_thread = ImageThread()

    image_thread.start()


# Driver code
if __name__ == '__main__':
    main()

C# Code:

using System;
using System.Collections;
using System.Collections.Generic;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using UnityEngine;
using UnityEngine.XR;
using UnityEngine.XR.Interaction.Toolkit;
using System.Collections.Generic;
using System.Threading;
using UnityEngine;
using System;

public class TCPTestClient : MonoBehaviour
{

    public String Server_Addres = "127.0.0.1";
    public int DataPort = 8052;
    public int ImagePort = 8053;

    string serverMessage;

    public GameObject renderImage;

    byte[] bytes_image;

    private bool serverComm = false;

    #region private members    
    private TcpClient socketConnectionData;
    private TcpClient socketConnectionImage;
    private Thread clientReceiveThread;
    #endregion

    Renderer renderer;
    // Use this for initialization    
    void Start()
    {
        renderer = GetComponent<Renderer>();
        ConnectImageServer();

    }
    // Update is called once per frame
    void Update()
    {
        setImageAsTexture();
    }

    /// <summary>    
    /// Setup socket connection.    
    /// </summary>    
    private void ConnectDataServer()
    {
        try
        {
            socketConnectionData = new TcpClient(Server_Addres, DataPort);
        }
        catch (Exception e)
        {
            serverComm = false;
            Debug.Log("On client connect exception " + e);
        }
    }

    /// <summary>    
    /// Setup socket connection.    
    /// </summary>    
    private void ConnectImageServer()
    {
        try
        {
            socketConnectionImage = new TcpClient(Server_Addres, ImagePort);

            clientReceiveThread = new Thread(new ThreadStart(ListenForData));
            clientReceiveThread.IsBackground = true;
            clientReceiveThread.Start();
            serverComm = true;
        }
        catch (Exception e)
        {
            serverComm = false;
            Debug.Log("On client connect exception " + e);
        }
    }

    private void setImageAsTexture()
    {
        if (serverMessage != null && serverComm == true)
        {
            Texture2D tex = new Texture2D(100, 100, TextureFormat.BGRA32, true, false);
            tex.hideFlags = HideFlags.HideAndDontSave;
            tex.filterMode = FilterMode.Point;
            tex.LoadImage(bytes_image);
            renderImage.GetComponent<Renderer>().material.mainTexture = tex;

        }
    }
    public static byte[] DecodeUrlBase64(string s)
    {
        s = s.Replace('-', '+').Replace('_', '/').PadRight(4 * ((s.Length + 3) / 4), '=');
        return Convert.FromBase64String(s);
    }

}

I appreciate your help.

Hi,
I try to accomplish something similar. How exactly did you make it work? I send my webcam feed to Unity and try to display it on a Texture, similar to your code, but it is not working.

hey were you able to do it i am stuck at same point

Hi, I have the same problem, were you able to do it or what did you do?