Search Unity

[SOLVED] Performance issues upgrading to latest version

Discussion in 'Data Oriented Technology Stack' started by floboc, May 2, 2019.

  1. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    Hello everyone,
    I spent the whole week updating my game code (about 90 systems) from the preview-18 to the latest preview-30 version of the Entities package. At the same time I also jobified almost all my systems.

    Unfortunately, I was disappointed to see that performance was actually worse than when using the old [Inject] API with no jobs at all. Actually, when I was upgrading my code, there was a time when things looked brighter while some of my systems were still using the old API and others using jobs.

    I believe I made a mistake somewhere but cannot find it, so here I come for help :)

    I observed that sometimes one of my systems (not always the same) takes an incredibly huge amount of time while it mostly does nothing.

    This is what it looks like in the profiler:
    gc_collect_1.jpg You can see that the "AIStunSystem" takes about 17ms while workers are actually doing nothing at all.

    Looking at the hierarchy, this seems to be caused by "GC.Collect":
    gc_collect_2.jpg
    I also noticed that several of my systems allocate small amounts of garbage collected memory each frame.
    This seems to be caused by calls to "ToComponentDataArray" or "ToEntityArray".
    Is this expected?
    Do you think that most of my issues arise from too much allocation of garbage collected memory ?

    Here is a simplified version of the code of the AIStunSystem, with some added explanations:
    Code (CSharp):
    1. [UpdateInGroup(typeof(UpdateGroupInput))]
    2. [UpdateAfter(typeof(AIPathfinderSystem))]
    3. public class AIStunSystem : JobComponentSystem
    4. {
    5.     [BurstCompile]
    6.     struct Job : IJob
    7.     {
    8.         //Write
    9.         public NativeArray<PlayerInputData> input_array;
    10.  
    11.         //Read-only
    12.         public Entity entity;
    13.         public StunPowerData stun_power;
    14.  
    15.         //note: this is a struct containing several NativeArray or other NativeContainers
    16.         [ReadOnly] public AIHelperTmp helper;
    17.  
    18.         [DeallocateOnJobCompletion]
    19.         [ReadOnly] public NativeArray<Entity> targets;
    20.  
    21.         public void Execute()
    22.         {
    23.             bool use_power = true;
    24.            
    25.             //Some basic stuff using the job data
    26.  
    27.             if (use_power)
    28.             {
    29.                 PlayerInputData input = input_array[0];
    30.                 input.PowerAction = true;
    31.                 input_array[0] = input;
    32.             }
    33.         }
    34.     }
    35.  
    36.     private AIPrepareSystem prepare_system;
    37.     private AIPathfinderSystem pathfinder_system;
    38.  
    39.     private EntityQuery ai_group;
    40.     private EntityQuery target_group;
    41.  
    42.     private Dictionary<int, NativeArray<PlayerInputData>> input_arrays = new Dictionary<int, NativeArray<PlayerInputData>>();
    43.  
    44.     protected override void OnCreate()
    45.     {
    46.         prepare_system = World.GetOrCreateSystem<AIPrepareSystem>();
    47.         pathfinder_system = World.GetOrCreateSystem<AIPathfinderSystem>();
    48.  
    49.         ai_group = GetEntityQuery(
    50.                     ComponentType.ReadWrite<PlayerInputData>(),
    51.                     ComponentType.ReadOnly<AIData>(),
    52.                     ComponentType.ReadOnly<StunPowerData>());
    53.  
    54.         target_group = GetEntityQuery(
    55.                     ComponentType.ReadOnly<TargetData>());
    56.     }
    57.  
    58.     protected override JobHandle OnUpdate(JobHandle inputDeps)
    59.     {
    60.         var ai_entities = ai_group.ToEntityArray(Allocator.TempJob);
    61.         var ai_inputs = ai_group.ToComponentDataArray<PlayerInputData>(Allocator.TempJob);
    62.         var ai_powers = ai_group.ToComponentDataArray<StunPowerData>(Allocator.TempJob);
    63.  
    64.         JobHandle all_deps = inputDeps;
    65.  
    66.         for (int i = 0; i < ai_entities.Length; i++)
    67.         {
    68.             Entity entity = ai_entities[i];
    69.  
    70.             //The AIPrepareSystem stores one helper for each entity having an AIData component
    71.             //Because the helper contains several native containers that I cannot flatten, I have to schedule one job for each entity (max 7 jobs by design)
    72.             if (prepare_system.Helpers.ContainsKey(entity))
    73.             {
    74.                 //I use this trick to retrieve a modified version of PlayerInputData after each job is done...
    75.                 //because there is no way to have multiple jobs write to the same NativeArray ?
    76.                 var input_array = new NativeArray<PlayerInputData>(1, Allocator.TempJob);
    77.                 input_array[0] = ai_inputs[i];
    78.                 input_arrays[i] = input_array;
    79.  
    80.                 //Note: the helper object is last overriden by the AIPathfinderSystem
    81.                 //This system schedules one job per entity as well
    82.                 //So we use the entity-specific JobHandle as an additional dependency
    83.                 var job = new Job
    84.                 {
    85.                     helper = prepare_system.Helpers[entity],
    86.                     input_array = input_arrays[i],
    87.                     entity = entity,
    88.                     targets = target_group.ToEntityArray(Allocator.TempJob),
    89.                     stun_power = ai_powers[i]
    90.                 }.Schedule(JobHandle.CombineDependencies(inputDeps, pathfinder_system.Deps[entity]));
    91.  
    92.                 //We keep track of all dependencies
    93.                 all_deps = JobHandle.CombineDependencies(all_deps, job);
    94.             }
    95.         }
    96.  
    97.         //Ensure jobs are complete so that we can retrieve updated PlayerInputData
    98.         all_deps.Complete();
    99.  
    100.         //Copy data from jobs to the NativeArray and dispose
    101.         //I dislike this trick but couldn't do otherwise...
    102.         foreach (var it in input_arrays)
    103.         {
    104.             ai_inputs[it.Key] = it.Value[0];
    105.             it.Value.Dispose();
    106.         }
    107.         input_arrays.Clear();
    108.  
    109.         //Update data
    110.         ai_group.CopyFromComponentDataArray(ai_inputs);
    111.  
    112.         //Dispose
    113.         ai_entities.Dispose();
    114.         ai_inputs.Dispose();
    115.         ai_powers.Dispose();
    116.  
    117.         return all_deps;
    118.     }
    119. }
    Thank you for your help!
     
  2. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    If I disable Leak Detection AND JobsDebugger, then the gc memory allocation stops going crazy and I get decent performance again :)

    However, I still have some performance issues. The profiler looks very "sparse". For instance this is what I get for the same system as before:
    sparse_1.jpg

    Most blue areas are data gathering jobs (from unity engine).

    and actually my whole timeline looks like this with the exception of some systems:
    sparse_2.jpg
    Do you know what might cause this behaviour?
     
  3. recursive

    recursive

    Joined:
    Jul 12, 2012
    Posts:
    591
    A lot of EntityManager and EntityQuery functions/helpers GC allocate safety tracking objects in the editor. Try profiling a build and check if the performance is acceptable there for your system.

    You can definitely have a NativeArray that's written to by multiple threads IFF you know no two threads will write to the same range of elements, via [NativeDisableContainerSafetyRestrictionAttribute] on the job field attribute.

    EDIT: They renamed the attribute.

    There's also likely a better way to handle your logic and data structures that won't rely as much on mainthread timing. It's ok to use NativeHashMap and NativeMultiHashMap, and ComponentDataFromEntity if the logic isn't completely linear and requires lookup access.
     
    Last edited: May 3, 2019
  4. recursive

    recursive

    Joined:
    Jul 12, 2012
    Posts:
    591
    Those GatherChunksAndOffsetsJob(s) are the EntityManager building your component arrays, and probably the write-back as well. It runs jobs to walk the appropriate chunks and build a linear array out of them.

    You can actually get the JobHandle out of most of them (they'll have an overload that has an out JobHandle parameter) and put that as a dependency for your jobs, that may help with some of the fragmentation.
     
  5. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    Thank you for your answer, I will definitely test [DisableParallelForRestriction] in that case.
    I actually thought that this only worked when concurrently writing from the same job (with a parallel for), but not for different job instances?

    I use NativeMultiHashMap and ComponentDataFromEntity in many places of my code but didn't think to try it for writing from multiple jobs. I will take a look at this.

    Regarding the gathering jobs, do you have a code sample on how to get their handle? (I didn't find the method you mentioned)
     
  6. recursive

    recursive

    Joined:
    Jul 12, 2012
    Posts:
    591
    ToComponentDataArray(Allocator allocator, out JobHandle handle) doc entry.
    ToEntityArray(Allocator allocator, out JobHandle handle) doc entry.

    Also, if there's a good chance there may be frames without chunks of either query group, you can use the system's RequireForUpdate(EntityQuery query) when you set up in OnCreate, and if there are no AIs or no targets, the system will skip its OnUpdate call.

    While there's things in a state of flux, the entity package docs in general are a great resource now, instead of just the samples repo.
     
  7. recursive

    recursive

    Joined:
    Jul 12, 2012
    Posts:
    591
    There's also Concurrent versions of most NativeContainers now, including EntityCommandBuffer, NativeMultiHashMap. I think the only ones that don't have concurrent versions are NativeArray and NativeList. These concurrent versions allow write-only access and in ECBs case guarantee determinism.
     
  8. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    Thank you for all this information!

    By the way, I tried the [NativeDisableContainerSafetyRestrictionAttribute] to try to write to the same native array from different jobs but got an error saying that I had to add a dependency between these jobs. I can do so, but that will mean they won't run in parallel.

    I fear that this will be the same with concurrent version of native containers...
     
  9. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    In fact my biggest issue here is the "helper" object. This is what actually prevents me from running this from a normal job, like a parallel for or a foreach job.

    The AIHelper is quite a big struct that looks somethings like this:
    Code (CSharp):
    1. public struct AIHelperTmp
    2. {
    3.     public struct Settings
    4.     {
    5.         public float param1;
    6.         public float param2;
    7.         /*
    8.         I have about 30 fields here
    9.         */
    10.     };
    11.  
    12.     public Settings settings;
    13.  
    14.     public NativeList<MapCoords> all_map_positions;
    15.  
    16.     public CollisionMap player_collision_map;
    17.  
    18.     public Native2DArray<bool> map_is_hidden;
    19.     public Native2DArray<bool> map_can_drop;
    20.     public Native2DArray<float> map_delay;
    21.     public Native2DArray<float> map_will_fall;
    22.     public Native2DArray<int> map_reaching_count;
    23.     public Native2DDirectionalArray<ExplosionBarrier> map_barriers;
    24.     public NativeMultiHashMap<int, Entity> map_reaching;
    25.     public NativeMultiHashMap<int, Entity> map_entities;
    26.     public NativeMultiHashMap<int, EntityID> map_ids;
    27.     public NativeHashMap<Entity, float> delays;
    28.  
    29.     public Native2DArray<float> map_costs;
    30.     public NativeHashMap<int, float> entity_costs;
    31.  
    32.     public MapCoords pathfinder_from;
    33.     public Native2DArray<bool> pathfinder_explored;
    34.     public Native2DArray<float> pathfinder_cost_so_far;
    35.     public Native2DArray<int> pathfinder_length_so_far;
    36.     public Native2DArray<Direction> pathfinder_came_from;
    37.  
    38.     /*
    39.     And here I have many methods using the arrays above
    40.     */
    41. }
    The MapCollision and Native2DArray are just simple structs that wrap a NativeArray but have additional methods to access them in an easy way.

    I need this as a single struct object because it contains methods that use all these fields together to provide information.

    The AIPrepareSystem has a dictionary of helpers like so:
    public Dictionary<Entity, AIHelper> Helpers;

    Each frame, the AIPrepareSystem updates the helper objects and creates native containers if required (note that some of these are shared between helpers, while others are specific).
    Then the AICostSystem and the AIPathfinderSystem perform additional computation on the data of the AIHelper objects.

    When AIPathfinderSystem jobs are complete, the AIHelper data will not change anymore. This is why I sent these job handles as dependency of my other systems.

    Now the issue is that I cannot give a NativeArray of AIHelper object to my job, since the AIHelper object contains NativeContainers. This is why I had to schedule one job for each AIHelper.
    If you know a way of passing all the AIHelper structs to a single job that would be the best solution :)

    Since I know I will have at most 8 of them, I already tried an ugly trick using 8 AIHelper fields (helper1, helper2, ...) in the same job but then got some exception about the size of the object I was passing to the job...
     
  10. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    So I followed one piece of your advice and tried to avoid relying too much on the main thread (using Complete() I suppose).
    This is how my code looks like now:

    Code (CSharp):
    1. using Unity.Entities;
    2. using Unity.Collections;
    3. using Unity.Burst;
    4. using Unity.Jobs;
    5. using System.Collections.Generic;
    6.  
    7. [UpdateInGroup(typeof(UpdateGroupInput))]
    8. [UpdateAfter(typeof(AIPathfinderSystem))]
    9. public class AIStunSystem : JobComponentSystem
    10. {
    11.     [BurstCompile]
    12.     struct Job : IJob
    13.     {
    14.         public int index;
    15.  
    16.         //Write
    17.         [NativeDisableParallelForRestriction]
    18.         public NativeArray<PlayerInputData> input_array;
    19.  
    20.         //Read-only
    21.         [ReadOnly] public NativeArray<StunPowerData> stun_powers;
    22.         [ReadOnly] public NativeArray<RotationData> rotations;
    23.         [ReadOnly] public NativeArray<PlayerData> players;
    24.  
    25.         [ReadOnly] public AIHelperTmp helper;
    26.  
    27.         [ReadOnly] public NativeArray<Entity> targets;
    28.         [ReadOnly] public NativeArray<MapPositionData> target_positions;
    29.         [ReadOnly] public NativeArray<PlayerData> target_players;
    30.  
    31.         [ReadOnly] public UpdateGroundSystem.LevelBoard level_board;
    32.  
    33.         public void Execute()
    34.         {
    35.             bool use_power = true;
    36.          
    37.             //There is some more irrelevant logic here...
    38.  
    39.             if (use_power)
    40.             {
    41.                 PlayerInputData input = input_array[index];
    42.                 input.Special = true;
    43.                 input_array[index] = input;
    44.             }
    45.         }
    46.     }
    47.  
    48.     [BurstCompile]
    49.     struct CleanupJob : IJobForEachWithEntity<PlayerInputData>
    50.     {
    51.         [DeallocateOnJobCompletion]
    52.         [ReadOnly] public NativeArray<Entity> entities;
    53.  
    54.         [DeallocateOnJobCompletion]
    55.         [ReadOnly] public NativeArray<PlayerInputData> inputs;
    56.  
    57.         [DeallocateOnJobCompletion]
    58.         [ReadOnly] public NativeArray<StunPowerData> stun_powers;
    59.  
    60.         [DeallocateOnJobCompletion]
    61.         [ReadOnly] public NativeArray<RotationData> rotations;
    62.  
    63.         [DeallocateOnJobCompletion]
    64.         [ReadOnly] public NativeArray<PlayerData> players;
    65.  
    66.         [DeallocateOnJobCompletion]
    67.         [ReadOnly] public NativeArray<Entity> targets;
    68.  
    69.         [DeallocateOnJobCompletion]
    70.         [ReadOnly] public NativeArray<MapPositionData> target_positions;
    71.  
    72.         [DeallocateOnJobCompletion]
    73.         [ReadOnly] public NativeArray<PlayerData> target_players;
    74.  
    75.         public void Execute(Entity entity, int index, ref PlayerInputData input)
    76.         {
    77.             //Write modified data
    78.             for (int i = 0; i < entities.Length; i++)
    79.             {
    80.                 if (entities[i] == entity)
    81.                 {
    82.                     input = inputs[i];
    83.                     return;
    84.                 }
    85.             }
    86.         }
    87.     }
    88.  
    89.     private AIPrepareSystem prepare_system;
    90.     private AIPathfinderSystem pathfinder_system;
    91.     private UpdateGroundSystem ground_system;
    92.  
    93.     private EntityQuery ai_group;
    94.     private EntityQuery target_group;
    95.  
    96.     protected override void OnCreate()
    97.     {
    98.         prepare_system = World.GetOrCreateSystem<AIPrepareSystem>();
    99.         pathfinder_system = World.GetOrCreateSystem<AIPathfinderSystem>();
    100.         ground_system = World.GetOrCreateSystem<UpdateGroundSystem>();
    101.  
    102.         ai_group = GetEntityQuery(
    103.                     ComponentType.ReadWrite<PlayerInputData>(),
    104.                     ComponentType.ReadOnly<AIData>(),
    105.                     ComponentType.ReadOnly<PlayerData>(),
    106.                     ComponentType.ReadOnly<StunPowerData>(),
    107.                     ComponentType.ReadOnly<RotationData>(),
    108.                     ComponentType.ReadOnly<MapPositionData>(),
    109.                     ComponentType.Exclude<JumpData>(),
    110.                     ComponentType.Exclude<StunnedData>(),
    111.                     ComponentType.Exclude<FrozenData>());
    112.  
    113.         target_group = GetEntityQuery(
    114.                     ComponentType.ReadOnly<PlayerData>(),
    115.                     ComponentType.ReadOnly<MapPositionData>(),
    116.                     ComponentType.Exclude<InvisibleData>(),
    117.                     ComponentType.Exclude<JumpData>());
    118.     }
    119.  
    120.     protected override JobHandle OnUpdate(JobHandle inputDeps)
    121.     {
    122.         var ai_entities = ai_group.ToEntityArray(Allocator.TempJob);
    123.         var ai_inputs = ai_group.ToComponentDataArray<PlayerInputData>(Allocator.TempJob);
    124.         var ai_players = ai_group.ToComponentDataArray<PlayerData>(Allocator.TempJob);
    125.         var ai_powers = ai_group.ToComponentDataArray<StunPowerData>(Allocator.TempJob);
    126.         var ai_rotations = ai_group.ToComponentDataArray<RotationData>(Allocator.TempJob);
    127.  
    128.         var target_positions = target_group.ToComponentDataArray<MapPositionData>(Allocator.TempJob);
    129.         var target_players = target_group.ToComponentDataArray<PlayerData>(Allocator.TempJob);
    130.         var targets = target_group.ToEntityArray(Allocator.TempJob);
    131.  
    132.         JobHandle all_deps = inputDeps;
    133.  
    134.         for (int i = 0; i < ai_entities.Length; i++)
    135.         {
    136.             Entity entity = ai_entities[i];
    137.  
    138.             if (prepare_system.Helpers.ContainsKey(entity))
    139.             {
    140.                 var job = new Job
    141.                 {
    142.                     index = i,
    143.                     time = UnityEngine.Time.time,
    144.                     stun_duration = GameBootstrap.Settings.StunPowerDuration,
    145.                     helper = prepare_system.Helpers[entity],
    146.                     input_array = ai_inputs,
    147.                     players = ai_players,
    148.                     targets = targets,
    149.                     target_positions = target_positions,
    150.                     level_board = ground_system.Board,
    151.                     target_players = target_players,
    152.                     rotations = ai_rotations,
    153.                     stun_powers = ai_powers
    154.                 }.Schedule(JobHandle.CombineDependencies(all_deps, pathfinder_system.Deps[entity]));
    155.  
    156.                 all_deps = JobHandle.CombineDependencies(all_deps, job);
    157.             }
    158.         }
    159.  
    160.         //Write & Dispose data
    161.         var cleanup_job = new CleanupJob
    162.         {
    163.             entities = ai_entities,
    164.             targets = targets,
    165.             target_players = target_players,
    166.             target_positions = target_positions,
    167.             players = ai_players,
    168.             rotations = ai_rotations,
    169.             stun_powers = ai_powers,
    170.             inputs = ai_inputs
    171.         }.Schedule(this, all_deps);
    172.  
    173.         all_deps = JobHandle.CombineDependencies(all_deps, cleanup_job);
    174.  
    175.         return all_deps;
    176.     }
    177. }
    In short, each job now depends on the previous one since it writes to the same NativeArray (even using the safety attribute didn't allow them to run in parallel). Also, each job now accesses the same read-only native arrays instead of using ToComponentDataArray for each job (although these might be cached in some way?)
    And there is an additional job, called CleanupJob, which is responsible for deallocating data and updating modified data, that is run after all jobs are done.

    In the profiler this job now looks like this:
    no_complete2.jpg

    As you can see there is no improvement, and performance is even worse now that my jobs (7 in total here) cannot run in parallel. You can see them running in sequence at the end. I guess the best would be to find a way to only have to schedule 1 job instead of 7, but I have to solve the AIHelper issue before (see my post above).

    I also tried using the data gathering handles as additional dependencies for the job. It slightly reordered the timing of the data gathering jobs but did not lead to any performance improvement.

    Also, I am not sure why the JobComponentSystem is running until all jobs are done since I didn't use any complete() call.
     
    Last edited: May 3, 2019
  11. snacktime

    snacktime

    Joined:
    Apr 15, 2013
    Posts:
    2,375
    Your jobs are sequential because you are scheduling them all against the same job handle sequentially. You need to create a native array to hold the job handles that schedule returns, and then combine that array of jobhandles all at once after scheduling them all. There is an overload for combine dependencies that takes a native array/slice.

    But ToComponentDataArray is not what you want really, you should be moving to the new job types here. Like manual iteration using GetArchetypeChunkComponentType and CreateArchetypeChunkArray. The docs cover all this fairly well. Ideally you really need to abstract stuff out more but working with what you have the idiomatic manual iteration should work ok.
     
  12. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    Yes, I know they are sequential because I schedule them to be sequential. I had to do it because they all write to the same NativeArray (but at different indexes) and using [NativeDisableParallelForRestriction] did not help (I still got an exception saying that I should add a dependency between them since they write to the same NativeArray).

    In the meantime, I also made a non-jobified version like this:
    Code (CSharp):
    1. [UpdateInGroup(typeof(UpdateGroupInput))]
    2. [UpdateAfter(typeof(AIPathfinderSystem))]
    3. public class AIStunSystem : ComponentSystem
    4. {
    5.     private AIPrepareSystem prepare_system;
    6.     private AIPathfinderSystem pathfinder_system;
    7.     private UpdateGroundSystem ground_system;
    8.  
    9.     private EntityQuery ai_group;
    10.     private EntityQuery target_group;
    11.  
    12.     private NativeArray<Entity> targets;
    13.     private NativeArray<MapPositionData> target_positions;
    14.     private NativeArray<PlayerData> target_players;
    15.  
    16.     protected override void OnCreate()
    17.     {
    18.         prepare_system = World.GetOrCreateSystem<AIPrepareSystem>();
    19.         pathfinder_system = World.GetOrCreateSystem<AIPathfinderSystem>();
    20.         ground_system = World.GetOrCreateSystem<UpdateGroundSystem>();
    21.  
    22.         ai_group = GetEntityQuery(
    23.                     ComponentType.ReadWrite<PlayerInputData>(),
    24.                     ComponentType.ReadOnly<AIData>(),
    25.                     ComponentType.ReadOnly<PlayerData>(),
    26.                     ComponentType.ReadOnly<StunPowerData>(),
    27.                     ComponentType.ReadOnly<RotationData>(),
    28.                     ComponentType.ReadOnly<MapPositionData>(),
    29.                     ComponentType.Exclude<JumpData>(),
    30.                     ComponentType.Exclude<StunnedData>(),
    31.                     ComponentType.Exclude<FrozenData>());
    32.  
    33.         target_group = GetEntityQuery(
    34.                     ComponentType.ReadOnly<PlayerData>(),
    35.                     ComponentType.ReadOnly<MapPositionData>(),
    36.                     ComponentType.Exclude<InvisibleData>(),
    37.                     ComponentType.Exclude<JumpData>());
    38.     }
    39.  
    40.     protected override void OnStopRunning()
    41.     {
    42.         if (targets.IsCreated)
    43.         {
    44.             targets.Dispose();
    45.             target_players.Dispose();
    46.             target_positions.Dispose();
    47.         }
    48.     }
    49.  
    50.     protected override void OnUpdate()
    51.     {
    52.         if (targets.IsCreated)
    53.         {
    54.             targets.Dispose();
    55.             target_players.Dispose();
    56.             target_positions.Dispose();
    57.         }
    58.  
    59.         targets = target_group.ToEntityArray(Allocator.TempJob);
    60.         target_players = target_group.ToComponentDataArray<PlayerData>(Allocator.TempJob);
    61.         target_positions = target_group.ToComponentDataArray<MapPositionData>(Allocator.TempJob);
    62.  
    63.         Entities.With(ai_group).ForEach( (Entity entity, ref PlayerInputData input, ref StunPowerData stun_power, ref RotationData rotation, ref MapPositionData map_pos, ref PlayerData player) =>
    64.         {
    65.             if (prepare_system.Helpers.ContainsKey(entity))
    66.             {
    67.                 AIHelperTmp helper = prepare_system.Helpers[entity];
    68.  
    69.                 //Some irrelevant logic here that can be simplified as:
    70.                 bool use_power = false;
    71.                 for (int i = 0; i < 5; i++)
    72.                 {
    73.                     if (SomeFunction())
    74.                     {
    75.                           use_power = true;
    76.                           break;
    77.                     }
    78.                 }
    79.  
    80.                 if (use_power)
    81.                 {
    82.                     input.Special = true;
    83.                 }
    84.             }
    85.         });
    86.     }
    87.  
    88.    public bool SomeFunction()
    89.    {
    90.       //Some logic involving the native arrays targets, target_positions and target_players
    91.       return true;
    92.    }
    93. }
    And it runs faster:
    no_job.jpg

    However, it generates a small amount of gc memory (where?), so this is not ideal.
    I will look into GetArchetypeChunkComponentType as you suggested.

    About abstracting stuff more, do you have something particular in mind?
    Thank you for your help!
     
    Last edited: May 3, 2019
  13. Joachim_Ante

    Joachim_Ante

    Unity Technologies

    Joined:
    Mar 16, 2005
    Posts:
    4,671
    Just because code is sequential doesn't mean it shouldn't be jobified. (Parallelism & jobification is not the same thing)

    You can use IJob instead of IJobParallelFor if something isn't parallelizable.

    Also IJobForEach support ScheduleSingle which effectively schedules the IJobForEach on a single job, thus allowing you to freely access any elements in an array, resizing a list etc.

    In your specific case, I think simply making struct Job : IJobForEach<>

    and using ScheduleSingle seems like the right approach and would significantly simplify your code. It seems like 90% of your code is just moving arrays around and as far as i can see there is no reason for doing any of that.
     
    Last edited: May 3, 2019
  14. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    You are right about jobs, I just wanted some base for my benchmarks (as I expect to have at least identical performance with jobified work than with the simple non-jobified approach).

    Actually most of my game logic (which is not shown here) uses IJobForEach jobs, some of which using Schedule and other using ScheduleSingle depending on what they do.

    Here I just showed the AI-related stuff which indeed moves arrays a lot because I cannot find a way to pass a list of AIHelper objects to a job, since this is a struct that contains several native containers (see my previous posts).

    Maybe the best solution here would be to find a way to flatten all these objects.

    By the way, what is the proper way to iterate over two entity groups with jobs ?
    In my case I often have an AI and a TARGET group.
    For each entity in the AI group I might require iterating over the TARGET group.
    Currently, I handle this by using a IJobForEach on the AI group, and passing the TARGET group data as native arrays.
    Is there a better way?
     
  15. Joachim_Ante

    Joachim_Ante

    Unity Technologies

    Joined:
    Mar 16, 2005
    Posts:
    4,671
    Yes. It seems the root of the problem is the Dictionary<int, NativeArray<...> > that is very much not idiomatic ECS code and seems to be what makes everything complicated.

    Using DynamicBuffer instead should solve it.
     
  16. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    Definitely, I am currently trying to achieve the same objective with dynamic buffers.
    By the way, what is the recommended max size of a dynamic buffer?

    Thank you for your help. I will post updates.
     
  17. Joachim_Ante

    Joachim_Ante

    Unity Technologies

    Joined:
    Mar 16, 2005
    Posts:
    4,671
    There is no limit other than what Malloc can allocate.
     
  18. ReadyPlayGames

    ReadyPlayGames

    Joined:
    Jan 24, 2015
    Posts:
    41
    That sounds like an inspirational quote. Follow your dreams!
     
    siggigg, FROS7 and Abbrew like this.
  19. floboc

    floboc

    Joined:
    Oct 31, 2017
    Posts:
    89
    Haha right! :)

    I followed all the advice given here:
    - I used DynamicBuffer to store my AI data (pathfinding and other stuff) instead of my previous AIHelper object, which allowed me to use IJobForEach (which is run in parallel) instead of scheduling multiple jobs.
    - I used chunk iteration rather than ToComponentDataArray to iterate over my targets
    - I used the handle returned by CreateArchetypeChunkArray as additional dependency to relieve the main thread

    I am now very happy with the result (and Burst is not enabled) as you can see in the profiler:
    new_job.jpg

    This is what my code looks like now (this is a simplified version — I removed the game logic for easier reading):

    Code (CSharp):
    using Unity.Entities;
    using Unity.Collections;
    using Unity.Burst;
    using Unity.Jobs;

    /// <summary>
    /// Decides, for every eligible AI-controlled player, whether the stun power
    /// should be triggered this frame, and records that decision in
    /// <c>PlayerInputData.Special</c>. Runs after pathfinding so the AI cell
    /// buffers are up to date.
    /// </summary>
    [UpdateInGroup(typeof(UpdateGroupInput))]
    [UpdateAfter(typeof(AIPathfinderSystem))]
    public class AIStunSystem : JobComponentSystem
    {
        // Per-entity job over AI players; entities that are frozen, stunned or
        // jumping are filtered out up front by the exclude list.
        [BurstCompile]
        [ExcludeComponent(
            typeof(FrozenData),
            typeof(StunnedData),
            typeof(JumpData))]
        struct Job : IJobForEachWithEntity<PlayerInputData, AIData, PathfinderData, StunPowerData, RotationData, PlayerData>
        {
            public AISettingsData settings;

            // Random-access lookup of each AI's cell buffer. Read-only, so
            // concurrent access from parallel chunks is safe.
            [NativeDisableParallelForRestriction]
            [ReadOnly] public BufferFromEntity<AICellElement> ai_buffers;

            // Snapshot of the TARGET query's chunks, gathered on schedule and
            // released automatically when the job completes.
            [DeallocateOnJobCompletion] [ReadOnly] public NativeArray<ArchetypeChunk> target_chunks;
            [ReadOnly] public ArchetypeChunkEntityType entity_type;
            [ReadOnly] public ArchetypeChunkComponentType<MapPositionData> map_position_type;
            [ReadOnly] public ArchetypeChunkComponentType<PlayerData> player_type;

            public void Execute(Entity entity, int index, ref PlayerInputData input, [ReadOnly]ref AIData ai, [ReadOnly]ref PathfinderData pathfinder, [ReadOnly]ref StunPowerData power, [ReadOnly]ref RotationData rotation, [ReadOnly]ref PlayerData player)
            {
                // Skip entities whose AI cell buffer has not been created yet.
                if (!ai_buffers.Exists(entity))
                    return;

                var cells = ai_buffers[entity];

                // The real game logic evaluates SomeFunction() several times;
                // the first success decides.
                var trigger = false;
                for (var attempt = 0; !trigger && attempt < 5; attempt++)
                    trigger = SomeFunction(cells);

                if (trigger)
                    input.Special = true;
            }

            // Walks every TARGET chunk; stands in for the real decision logic.
            bool SomeFunction(DynamicBuffer<AICellElement> buffer)
            {
                for (var c = 0; c < target_chunks.Length; c++)
                {
                    var current = target_chunks[c];
                    var positions = current.GetNativeArray(map_position_type);
                    var entities = current.GetNativeArray(entity_type);
                    var players = current.GetNativeArray(player_type);

                    for (var t = 0; t < entities.Length; t++)
                    {
                        // Some game logic here...
                    }
                }

                return false;
            }
        }

        private AIPrepareSystem prepare_system;
        private UpdateGroundSystem ground_system;

        private EntityQuery target_group;

        protected override void OnCreate()
        {
            prepare_system = World.GetOrCreateSystem<AIPrepareSystem>();
            ground_system = World.GetOrCreateSystem<UpdateGroundSystem>();

            // Potential stun victims: players with a map position that are
            // neither invisible nor airborne.
            target_group = GetEntityQuery(
                        ComponentType.ReadOnly<PlayerData>(),
                        ComponentType.ReadOnly<MapPositionData>(),
                        ComponentType.Exclude<InvisibleData>(),
                        ComponentType.Exclude<JumpData>());
        }

        protected override JobHandle OnUpdate(JobHandle inputDeps)
        {
            // No candidates this frame: nothing to schedule.
            if (target_group.CalculateLength() == 0)
                return inputDeps;

            var entity_type = GetArchetypeChunkEntityType();
            var map_position_type = GetArchetypeChunkComponentType<MapPositionData>(true);
            var player_type = GetArchetypeChunkComponentType<PlayerData>(true);

            // Gather TARGET chunks asynchronously and fold the gather handle
            // into our dependency chain so the job waits for it.
            var target_chunks = target_group.CreateArchetypeChunkArray(Allocator.TempJob, out var target_handle);
            inputDeps = JobHandle.CombineDependencies(inputDeps, target_handle);

            // Maybe change later to add to entity?
            AISettingsData ai_settings = GameBootstrap.AIs.GetAI(GameBootstrap.User.CurrentRuleSet.AI).ToComponent();

            var job = new Job
            {
                settings = ai_settings,
                target_chunks = target_chunks,
                entity_type = entity_type,
                player_type = player_type,
                map_position_type = map_position_type,
                ai_buffers = GetBufferFromEntity<AICellElement>(true)
            }.Schedule(this, inputDeps);

            return JobHandle.CombineDependencies(inputDeps, job);
        }
    }
    Thank you very much, all, for your help — things are much clearer for me now.
    All that is left is to convert my other 80 systems... again! ;)
     
    Last edited: May 4, 2019
    recursive and wobes like this.