How to pass Arrays/Lists to an IJobParallelFor job?

Hi,

How does one correctly pass a list of lists to a parallel-for job, to be used by each parallel iteration?

Seeing as you can’t have structs containing native collections, or have 2D native collections — or am I just missing something?

Below is just a sample job to show my problem: I’m trying to pass an array of structs containing an array to the parallel job, but if I do that I get an error saying the type is not blittable.

using System.Collections;
using System.Collections.Generic;
using Unity.Collections;
using Unity.Jobs;
using UnityEngine;

// NOTE(review): this first listing is the thread's non-working example — it
// exists to demonstrate the "not blittable" error the poster describes,
// caused by nesting one native container inside another.
public class TestJobScript : MonoBehaviour
{
    // Per-index job input. The nested NativeArray field below is the
    // problem: a struct holding a native container cannot itself be stored
    // in a NativeArray passed to a job (the reported blittability error).
    [System.Serializable]
    struct JobValues
    {
        public int value;
        public int other_value;
        public NativeArray<MoreValues> more_values_array; // nested container — source of the error
    }

    // Plain blittable element type for the inner array.
    [System.Serializable]
    struct MoreValues
    {
        public int value;
        public int other_value;

        public MoreValues(int value, int other_value)
        {
            this.value = value;
            this.other_value = other_value;
        }
    }

    // Result pair written into the multi-hash-map, keyed by job index.
    [System.Serializable]
    struct JobResult
    {
        public int result;
        public int other_result;

        public JobResult(int result, int other_result)
        {
            this.result = result;
            this.other_result = other_result;
        }
    }


    SampleParallelJob sample_job;
    JobHandle job_handle;
    NativeArray<JobValues> job_values;
    // Multi-map because each parallel index adds several results under the same key.
    NativeMultiHashMap<int, JobResult> job_results;

    // Builds sample data, schedules the job, blocks until it completes,
    // reads the results back, and disposes the containers.
    void SampleJob()
    {
        //set up
        job_values = new NativeArray<JobValues>(10, Allocator.TempJob);
        job_results = new NativeMultiHashMap<int, JobResult>(10 * 10, Allocator.TempJob);

        //get the data
        for (int i = 0; i < 10; i++)
        {
            var values = new JobValues();
            values.value = 1;
            values.other_value = 10;
            // NOTE(review): allocating a per-element NativeArray inside a
            // struct that goes into job_values is what triggers the error.
            values.more_values_array = new NativeArray<MoreValues>(10, Allocator.TempJob);

            for (int j = 0; j < 10; j++)
            {
                values.more_values_array[j] = new MoreValues(1,2);
            }

            job_values[i] = values;
        }

        //create the job
        sample_job = new SampleParallelJob();
        sample_job.job_values = job_values;
        sample_job.job_results = job_results.ToConcurrent();

        //start job
        job_handle = sample_job.Schedule(job_values.Length, 1);

        //job complete
        job_handle.Complete();

        //handle results
        var results = job_results.GetValueArray(Allocator.Temp);
        for (int i = 0; i < results.Length; i++)
        {
            var temp = results[i].result;
            //process results etc...
        }

        //clean up
        results.Dispose();
        job_values.Dispose();
        job_results.Dispose();
    }

    struct SampleParallelJob : IJobParallelFor
    {
        [ReadOnly]
        public NativeArray<JobValues> job_values;

        [WriteOnly]
        public NativeMultiHashMap<int, JobResult>.Concurrent job_results;

        // Runs once per index; copies each inner element into the result map.
        public void Execute(int index)
        {
            var job_value = job_values[index];

            for (int i = 0; i < 10; i++)
            {
                // NOTE(review): 'result' takes other_value and vice versa —
                // presumably intentional in this sample; confirm if reused.
                var result = job_value.more_values_array[i].other_value;
                var other_result = job_value.more_values_array[i].value;
                job_results.Add(index, new JobResult(result, other_result));

            }

            // NOTE(review): disposing a [ReadOnly] container from inside
            // Execute is likely to be rejected by the job safety system —
            // dispose on the main thread after Complete() instead.
            job_value.more_values_array.Dispose();
        }
    }
}

To do a 2D array, you really just allocate a 1D array with the “area” of the 2D array. So instead of having array[x][y], you’d have something like array[x + y * width]. You encode all of it into one dimension. It is non-ideal, but it is how most architectures work.

Unfortunately, you cannot pass a NativeArray&lt;NativeArray&gt;, nor is there a NativeArray2D; though you could probably create one as a helper using the NativeContainer structures as a guide.

2 Likes

Thank you, I have a workable solution now. It’s not perfect, but it will do.

I am adding all my MoreValues (the inner “y” elements) to a single NativeList and storing the start_index and length in the JobValues struct; then, in the job, it loops through just the part of the list that belongs to that index.

Updated sample parallel job

using System.Collections;
using System.Collections.Generic;
using Unity.Collections;
using Unity.Jobs;
using UnityEngine;

// Working version: the inner arrays are flattened into one shared
// NativeList, and each JobValues entry records the slice of that list it
// owns (start index + length). This avoids nesting native containers,
// which the job system does not support.
public class TestJobScript : MonoBehaviour
{
    // Sample data sizes: OuterCount parallel indices, each owning
    // InnerCount elements of the shared flat list.
    const int OuterCount = 10;
    const int InnerCount = 10;

    /// <summary>
    /// Per-index job input. Instead of a nested NativeArray, each entry
    /// records the slice [start_index, start_index + length) it owns in the
    /// shared more_values list.
    /// </summary>
    [System.Serializable]
    struct JobValues
    {
        public int value;
        public int other_value;
        public int more_values_array_start_index;
        public int more_values_array_length;
    }

    /// <summary>Element type of the flattened inner collection.</summary>
    [System.Serializable]
    struct MoreValues
    {
        public int value;
        public int other_value;

        public MoreValues(int value, int other_value)
        {
            this.value = value;
            this.other_value = other_value;
        }
    }

    /// <summary>Result pair written to the multi-hash-map, keyed by job index.</summary>
    [System.Serializable]
    struct JobResult
    {
        public int result;
        public int other_result;

        public JobResult(int result, int other_result)
        {
            this.result = result;
            this.other_result = other_result;
        }
    }

    SampleParallelJob sample_job;
    JobHandle job_handle;
    NativeArray<JobValues> job_values;
    // Flattened storage for every entry's inner elements.
    NativeList<MoreValues> more_values;
    NativeMultiHashMap<int, JobResult> job_results;

    /// <summary>
    /// Builds the flattened sample data, schedules the parallel job, blocks
    /// until it completes, reads the results back, and disposes all
    /// containers.
    /// </summary>
    void SampleJob()
    {
        //set up
        job_values = new NativeArray<JobValues>(OuterCount, Allocator.TempJob);
        job_results = new NativeMultiHashMap<int, JobResult>(OuterCount * InnerCount, Allocator.TempJob);
        // Presize the list so the Add() loop below never has to reallocate.
        more_values = new NativeList<MoreValues>(OuterCount * InnerCount, Allocator.TempJob);

        //get the data
        for (int i = 0; i < OuterCount; i++)
        {
            var values = new JobValues();
            values.value = 1;
            values.other_value = 10;
            // Record which slice of the flat list this entry owns.
            values.more_values_array_start_index = i * InnerCount;
            values.more_values_array_length = InnerCount;

            for (int j = 0; j < InnerCount; j++)
            {
                more_values.Add(new MoreValues(1,2));
            }

            job_values[i] = values;
        }

        //create the job
        sample_job = new SampleParallelJob();
        sample_job.job_values = job_values;
        sample_job.more_values = more_values;
        sample_job.job_results = job_results.ToConcurrent();

        //start job
        job_handle = sample_job.Schedule(job_values.Length, 1);

        //job complete
        job_handle.Complete();

        //handle results
        var results = job_results.GetValueArray(Allocator.Temp);
        for (int i = 0; i < results.Length; i++)
        {
            var temp = results[i].result;
            //process results etc...
        }

        //clean up
        results.Dispose();
        job_values.Dispose();
        more_values.Dispose();
        job_results.Dispose();
    }

    /// <summary>
    /// For each index, walks that entry's slice of the shared list and adds
    /// one JobResult per element under the index's key.
    /// </summary>
    struct SampleParallelJob : IJobParallelFor
    {
        [ReadOnly]
        public NativeArray<JobValues> job_values;

        [ReadOnly]
        public NativeList<MoreValues> more_values;

        [WriteOnly]
        public NativeMultiHashMap<int, JobResult>.Concurrent job_results;

        public void Execute(int index)
        {
            var job_value = job_values[index];
            int start = job_value.more_values_array_start_index;
            int end = start + job_value.more_values_array_length;

            for (int i = start; i < end; i++)
            {
                // Fetch the element once instead of hitting the indexer twice.
                var element = more_values[i];
                // NOTE(review): 'result' takes other_value and vice versa —
                // this mirrors the original sample; confirm it is intended.
                job_results.Add(index, new JobResult(element.other_value, element.value));
            }
        }
    }
}