How to stretch photo from camera to look like video streaming from camera to screen

Hello everyone!
I’m making an app for iOS and want to take photos with it.
I’m trying to scale the photo from the camera so that it looks the same as the live video preview shown on the screen before the user takes the photo.

The problem is that the texture I get from the camera is 1920×1440, while the screen is 1792×828.
I don’t know, how to scale correctly
Thank you for any help!

Answer is:

/// <summary>
/// Grabs the latest CPU camera image from the ARCameraManager, converts it
/// to an RGBA32 <see cref="Texture2D"/>, and displays it on the RawImage,
/// using the AspectRatioFitter so the image keeps the camera's aspect ratio.
/// Does nothing if no camera image is available this frame.
/// </summary>
public unsafe void GetScreenShot()
{
    if (!cameraManager.TryGetLatestImage(out image))
    {
        return;
    }

    // Cache the dimensions up front: the image is disposed in the finally
    // block below and must not be touched afterwards. (The original code
    // read image.width/height after Dispose() — a use-after-dispose.)
    int width = image.width;
    int height = image.height;

    var format = TextureFormat.RGBA32;

    Texture2D texture = new Texture2D(width, height, format, false);

    var conversionParams = new XRCameraImageConversionParams {
        inputRect = new RectInt(0, 0, width, height),
        outputDimensions = new Vector2Int(width, height),
        outputFormat = format,
        // Flip vertically: the native camera image row order is inverted
        // relative to Unity texture coordinates.
        transformation = CameraImageTransformation.MirrorY
    };

    var rawTextureData = texture.GetRawTextureData<byte>();
    try
    {
        // Convert directly into the texture's pixel buffer — no extra copy.
        IntPtr ptr = new IntPtr(rawTextureData.GetUnsafePtr());
        image.Convert(conversionParams, ptr, rawTextureData.Length);
    }
    finally
    {
        // We must dispose of the XRCameraImage after we're finished
        // with it to avoid leaking native resources.
        image.Dispose();
    }

    // Upload the converted pixel data to the GPU.
    texture.Apply();

    // Drive the AspectRatioFitter from the camera image's own aspect ratio
    // so the RawImage scales to the screen without distortion.
    Fit.aspectRatio = (float) width / height;
    Fit.aspectMode = AspectRatioFitter.AspectMode.HeightControlsWidth;

    background.texture = texture;
    // NOTE(review): the 90° roll counter-rotates the sensor image for
    // portrait display — confirm this matches your device orientation.
    background.rectTransform.localEulerAngles = new Vector3(0, 0, 90);
    background.enabled = true;
}