Calculating true pixel intensities in image?

Hi,

In short, I am working on something that uses HDRP and ray tracing to take images of a GEO satellite within the Unity scene (with objects positioned in the right places). I am up to the point of taking images of the model target, but I am finding that the target is saturated (max value of 1) and that pixel intensities are clamped to the 0–1 range.

I’ve tried adjusting the settings of the camera, intensity of the Sun but to no avail.

Here is the script I am using to capture the images and write the pixel data out in binary format:

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using System.IO;

/// <summary>
/// Captures frames from a dedicated camera into a 32-bit single-channel
/// (RFloat) RenderTexture at a fixed interval, and dumps each frame as a
/// raw little-endian float32 binary file (row-major, one float per pixel).
/// Attach to any GameObject and assign <see cref="captureCamera"/>.
/// </summary>
public class Image_Capture : MonoBehaviour
{
    [Header("Capture Settings")]
    public Camera captureCamera;
    private RenderTexture captureRT;

    [Header("Simulation Settings")]
    public float totalSimulationTime = 300f;   // total capture window, seconds
    public float captureInterval = 1f;         // seconds between captures

    [Header("Output Settings")]
    // Folder that receives the .raw frames. Previously this field existed but
    // was unused and the path was hard-coded in the capture loop; it is now
    // the single source of truth and editable in the Inspector.
    public string imagesFolderPath = "/Users/robertairey/Unity_Models/Raw_Images";

    private List<Texture2D> capturedImages = new List<Texture2D>();
    private List<float> timeStamps = new List<float>();
    private bool isCapturing = true;

    /// <summary>
    /// Creates the float render target, routes the camera into it, and
    /// starts the periodic capture coroutine. Logs an error and aborts if
    /// no camera is assigned.
    /// </summary>
    void Start()
    {
        if (captureRT == null)
        {
            // RFloat keeps HDR values outside [0,1] — but note the camera's
            // post-processing/tonemapping can still clamp before we read.
            captureRT = new RenderTexture(1920, 1080, 0, RenderTextureFormat.RFloat);
            captureRT.enableRandomWrite = true;
            captureRT.Create();
        }

        if (captureCamera != null)
        {
            captureCamera.targetTexture = captureRT;
        }
        else
        {
            Debug.LogError("Capture Camera is not assigned!");
            return;
        }

        // Make sure the destination folder exists before the first write;
        // File.WriteAllBytes does not create directories.
        Directory.CreateDirectory(imagesFolderPath);

        StartCoroutine(CaptureImages());
    }

    /// <summary>
    /// Coroutine: every <see cref="captureInterval"/> seconds, reads the
    /// render target back to the CPU and saves it, until
    /// <see cref="totalSimulationTime"/> has elapsed.
    /// </summary>
    IEnumerator CaptureImages()
    {
        float startTime = Time.time;
        int imageCounter = 0;

        while (Time.time - startTime < totalSimulationTime)
        {
            yield return new WaitForSeconds(captureInterval);

            // RFloat = 32-bit single-channel float. No mip chain: ReadPixels
            // only fills mip 0, so requesting mipmaps just wastes memory.
            Texture2D tex = new Texture2D(captureRT.width, captureRT.height, TextureFormat.RFloat, false);
            RenderTexture currentRT = RenderTexture.active;
            RenderTexture.active = captureRT;
            tex.ReadPixels(new Rect(0, 0, captureRT.width, captureRT.height), 0, 0);
            tex.Apply();
            RenderTexture.active = currentRT;   // restore previous target

            capturedImages.Add(tex);
            timeStamps.Add(Time.time - startTime);

            // Save image in raw float32 format.
            string filename = Path.Combine(imagesFolderPath, $"Image_{imageCounter:D3}.raw");
            SaveRawImage(tex, filename);
            Debug.Log($"Saved image {imageCounter} to {filename}");
            imageCounter++;
        }

        isCapturing = false;
        Debug.Log("Image capture complete.");
    }

    /// <summary>
    /// Writes the red channel of <paramref name="tex"/> as a flat
    /// little-endian float32 array (bottom-left origin, Unity's GetPixels
    /// order) to <paramref name="filePath"/>.
    /// </summary>
    void SaveRawImage(Texture2D tex, string filePath)
    {
        Color[] pixels = tex.GetPixels();
        float[] pixelData = new float[pixels.Length];

        for (int i = 0; i < pixels.Length; i++)
        {
            pixelData[i] = pixels[i].r; // single-channel grayscale value
        }

        byte[] byteData = new byte[pixelData.Length * sizeof(float)];
        System.Buffer.BlockCopy(pixelData, 0, byteData, 0, byteData.Length);
        File.WriteAllBytes(filePath, byteData);
    }

    /// <summary>
    /// Releases GPU/CPU resources: the render target and every readback
    /// texture. Without this the RenderTexture and Texture2Ds leak until
    /// the scene unloads.
    /// </summary>
    void OnDestroy()
    {
        if (captureRT != null)
        {
            if (captureCamera != null && captureCamera.targetTexture == captureRT)
            {
                captureCamera.targetTexture = null;
            }
            captureRT.Release();
            Destroy(captureRT);
        }

        foreach (Texture2D tex in capturedImages)
        {
            if (tex != null)
            {
                Destroy(tex);
            }
        }
        capturedImages.Clear();
    }
}

Any ideas or advice would be most appreciated.

The code looks OK to me. Check the post-processing effects — they could explain the value clamping (tonemapping in particular remaps HDR values into 0–1).
Try fully disabling them in the camera's Frame Settings to test.

1 Like