Using WebCamTexture with ARCamera causes issues

What I want to do in my AR app is give the user the option to take a picture with the front-facing camera, with a filter applied over it.

I managed to get this working with the following code:

private bool camAvailable;
private WebCamTexture frontCam;
private Texture defaultBackground;
string selectedDeviceName = "";

private Quaternion baseRotation;

public RawImage background;   // RawImage the camera feed is drawn onto
public AspectRatioFitter fit; // keeps the RawImage at the feed's aspect ratio
// Start is called before the first frame update
void Start()
{
    defaultBackground = background.texture;
    WebCamDevice[] devices = WebCamTexture.devices;
    baseRotation = transform.rotation;

    if (devices.Length == 0)
    {
        camAvailable = false;
        return;
    }

    // Look for a front-facing camera to use for the selfie view
    foreach (var camDevice in devices)
    {
        if (camDevice.isFrontFacing)
        {
            selectedDeviceName = camDevice.name;
            frontCam = new WebCamTexture(selectedDeviceName, Screen.width, Screen.height);
        }
    }

    if (frontCam == null)
    {
        Debug.Log("Unable to find front camera");
        return;
    }

    frontCam.Play();
    background.texture = frontCam;

    camAvailable = true;
}

// Update is called once per frame
void Update()
{
    if (!camAvailable)
    {
        return;
    }

    // Match the RawImage's aspect ratio to the camera feed
    float ratio = (float)frontCam.width / (float)frontCam.height;
    fit.aspectRatio = ratio;

    // Flip vertically if the device delivers a mirrored image
    float scaleY = frontCam.videoVerticallyMirrored ? -1f : 1f;
    background.rectTransform.localScale = new Vector3(1, scaleY, 1);

    // Counter-rotate to compensate for the camera's reported rotation
    int orient = -frontCam.videoRotationAngle;
    background.rectTransform.localEulerAngles = new Vector3(0, 0, orient);
}

However, when I try to use it together with ARCore / ARKit, the ARCamera goes completely black and doesn't seem to do anything.

Is there a way to combine these two, or is it simply not possible?

Any help is greatly appreciated.

Both ARCore and ARKit require exclusive control of the device camera while their session is running. As a result, you cannot use a WebCamTexture at the same time as an actively running ARCore/ARKit session.
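
One common workaround is to switch modes: stop the AR session before starting the WebCamTexture, and restart it once the selfie capture is done. Below is a minimal sketch of that idea, assuming you are using AR Foundation's ARSession component; the class name SelfieModeSwitcher, the selfieDisplay field, and the StartSelfieMode/StopSelfieMode methods are placeholders for illustration, and you should verify on your target devices that disabling the session actually releases the camera.

using System.Linq;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;

public class SelfieModeSwitcher : MonoBehaviour
{
    public ARSession arSession;    // the scene's ARSession (assumes AR Foundation)
    public RawImage selfieDisplay; // hypothetical RawImage used for the selfie preview

    private WebCamTexture frontCam;

    // Release the camera from ARCore/ARKit, then start the front-facing WebCamTexture.
    public void StartSelfieMode()
    {
        // Disabling the ARSession pauses the AR subsystems, which should free the camera.
        arSession.enabled = false;

        WebCamDevice front = WebCamTexture.devices.FirstOrDefault(d => d.isFrontFacing);
        if (string.IsNullOrEmpty(front.name))
        {
            Debug.LogWarning("No front-facing camera found");
            return;
        }

        frontCam = new WebCamTexture(front.name, Screen.width, Screen.height);
        frontCam.Play();
        selfieDisplay.texture = frontCam;
    }

    // Stop the WebCamTexture and hand the camera back to the AR session.
    public void StopSelfieMode()
    {
        if (frontCam != null)
        {
            frontCam.Stop();
            frontCam = null;
        }

        // Re-enabling the ARSession resumes ARCore/ARKit tracking.
        arSession.enabled = true;
    }
}

With this approach the AR camera background and the WebCamTexture never run at the same time, which avoids the black screen; the trade-off is a brief interruption of tracking whenever you switch between AR mode and the selfie view.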