Hi,
How does one correctly pass a list of lists to a parallel for job to be used by each parallel?
Seeing as you can’t have structs containing native collections or have 2d native collections or am i just missing something?
Below is just a sample job to show my problem, trying to pass an array of structs containing an array to the parallel job, but if I do that i get some error about its not bitable or something
using System.Collections;
using System.Collections.Generic;
using Unity.Collections;
using Unity.Jobs;
using UnityEngine;
public class TestJobScript : MonoBehaviour
{
[System.Serializable]
struct JobValues
{
public int value;
public int other_value;
public NativeArray<MoreValues> more_values_array;
}
[System.Serializable]
struct MoreValues
{
public int value;
public int other_value;
public MoreValues(int value, int other_value)
{
this.value = value;
this.other_value = other_value;
}
}
[System.Serializable]
struct JobResult
{
public int result;
public int other_result;
public JobResult(int result, int other_result)
{
this.result = result;
this.other_result = other_result;
}
}
SampleParallelJob sample_job;
JobHandle job_handle;
NativeArray<JobValues> job_values;
NativeMultiHashMap<int, JobResult> job_results;
void SampleJob()
{
//set up
job_values = new NativeArray<JobValues>(10, Allocator.TempJob);
job_results = new NativeMultiHashMap<int, JobResult>(10 * 10, Allocator.TempJob);
//get the data
for (int i = 0; i < 10; i++)
{
var values = new JobValues();
values.value = 1;
values.other_value = 10;
values.more_values_array = new NativeArray<MoreValues>(10, Allocator.TempJob);
for (int j = 0; j < 10; j++)
{
values.more_values_array[j] = new MoreValues(1,2);
}
job_values[i] = values;
}
//create the job
sample_job = new SampleParallelJob();
sample_job.job_values = job_values;
sample_job.job_results = job_results.ToConcurrent();
//start job
job_handle = sample_job.Schedule(job_values.Length, 1);
//job complete
job_handle.Complete();
//handle results
var results = job_results.GetValueArray(Allocator.Temp);
for (int i = 0; i < results.Length; i++)
{
var temp = results[i].result;
//process results etc...
}
//clean up
results.Dispose();
job_values.Dispose();
job_results.Dispose();
}
struct SampleParallelJob : IJobParallelFor
{
[ReadOnly]
public NativeArray<JobValues> job_values;
[WriteOnly]
public NativeMultiHashMap<int, JobResult>.Concurrent job_results;
public void Execute(int index)
{
var job_value = job_values[index];
for (int i = 0; i < 10; i++)
{
var result = job_value.more_values_array[i].other_value;
var other_result = job_value.more_values_array[i].value;
job_results.Add(index, new JobResult(result, other_result));
}
job_value.more_values_array.Dispose();
}
}
}