diff --git a/src/Arch.SourceGen/Queries/InlineParallelQuery.cs b/src/Arch.SourceGen/Queries/InlineParallelQuery.cs
index eee026a..4aa480f 100644
--- a/src/Arch.SourceGen/Queries/InlineParallelQuery.cs
+++ b/src/Arch.SourceGen/Queries/InlineParallelQuery.cs
@@ -24,38 +24,7 @@ public static void AppendHpParallelQuery(this StringBuilder builder, int amount)

     var innerJob = new IForEachJob();
     innerJob.ForEach = iForEach;

-    var pool = JobMeta>>.Pool;
-    var query = Query(in description);
-    foreach (var archetype in query.GetArchetypeIterator())
-    {
-        var archetypeSize = archetype.ChunkCount;
-        var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
-        foreach (var range in part)
-        {
-            var job = pool.Get();
-            job.Start = range.Start;
-            job.Size = range.Length;
-            job.Chunks = archetype.Chunks;
-            job.Instance = innerJob;
-
-            var jobHandle = SharedJobScheduler.Schedule(job);
-            JobsCache.Add(job);
-            JobHandles.Add(jobHandle);
-        }
-
-        // Return jobs to pool
-        SharedJobScheduler.Flush();
-        JobHandle.CompleteAll(JobHandles.Span);
-
-        for (var index = 0; index < JobsCache.Count; index++)
-        {
-            var job = Unsafe.As>>(JobsCache[index]);
-            pool.Return(job);
-        }
-
-        JobHandles.Clear();
-        JobsCache.Clear();
-    }
+    InlineParallelChunkQuery(in description, innerJob);
 }
 """;
@@ -84,38 +53,7 @@ public static void AppendHpeParallelQuery(this StringBuilder builder, int amount

     var innerJob = new IForEachWithEntityJob();
     innerJob.ForEach = iForEach;

-    var pool = JobMeta>>.Pool;
-    var query = Query(in description);
-    foreach (var archetype in query.GetArchetypeIterator()) {
-
-        var archetypeSize = archetype.ChunkCount;
-        var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
-        foreach (var range in part)
-        {
-            var job = pool.Get();
-            job.Start = range.Start;
-            job.Size = range.Length;
-            job.Chunks = archetype.Chunks;
-            job.Instance = innerJob;
-
-            var jobHandle = SharedJobScheduler.Schedule(job);
-            JobsCache.Add(job);
-            JobHandles.Add(jobHandle);
-        }
-
-        // Return jobs to pool
-        SharedJobScheduler.Flush();
-        JobHandle.CompleteAll(JobHandles.Span);
-
-        for (var index = 0; index < JobsCache.Count; index++)
-        {
-            var job = Unsafe.As>>(JobsCache[index]);
-            pool.Return(job);
-        }
-
-        JobHandles.Clear();
-        JobsCache.Clear();
-    }
+    InlineParallelChunkQuery(in description, innerJob);
 }
 """;
diff --git a/src/Arch.SourceGen/Queries/ParallelQuery.cs b/src/Arch.SourceGen/Queries/ParallelQuery.cs
index 2d75a29..361a294 100644
--- a/src/Arch.SourceGen/Queries/ParallelQuery.cs
+++ b/src/Arch.SourceGen/Queries/ParallelQuery.cs
@@ -24,37 +24,7 @@ public static StringBuilder AppendParallelQuery(this StringBuilder sb, int amoun

     var innerJob = new ForEachJob<{{generics}}>();
     innerJob.ForEach = forEach;

-    var pool = JobMeta>>.Pool;
-    var query = Query(in description);
-    foreach (var archetype in query.GetArchetypeIterator()) {
-
-        var archetypeSize = archetype.ChunkCount;
-        var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
-        foreach (var range in part)
-        {
-            var job = pool.Get();
-            job.Start = range.Start;
-            job.Size = range.Length;
-            job.Chunks = archetype.Chunks;
-            job.Instance = innerJob;
-
-            var jobHandle = SharedJobScheduler.Schedule(job);
-            JobsCache.Add(job);
-            JobHandles.Add(jobHandle);
-        }
-
-        SharedJobScheduler.Flush();
-        JobHandle.CompleteAll(JobHandles.Span);
-
-        for (var index = 0; index < JobsCache.Count; index++)
-        {
-            var job = Unsafe.As>>(JobsCache[index]);
-            pool.Return(job);
-        }
-
-        JobHandles.Clear();
-        JobsCache.Clear();
-    }
+    InlineParallelChunkQuery(in description, innerJob);
 }
 """;
@@ -84,37 +54,7 @@ public static StringBuilder AppendParallelEntityQuery(this StringBuilder sb, int

     var innerJob = new ForEachWithEntityJob<{{generics}}>();
     innerJob.ForEach = forEach;

-    var pool = JobMeta>>.Pool;
-    var query = Query(in description);
-    foreach (var archetype in query.GetArchetypeIterator())
-    {
-        var archetypeSize = archetype.ChunkCount;
-        var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
-        foreach (var range in part)
-        {
-            var job = pool.Get();
-            job.Start = range.Start;
-            job.Size = range.Length;
-            job.Chunks = archetype.Chunks;
-            job.Instance = innerJob;
-
-            var jobHandle = SharedJobScheduler.Schedule(job);
-            JobsCache.Add(job);
-            JobHandles.Add(jobHandle);
-        }
-
-        SharedJobScheduler.Flush();
-        JobHandle.CompleteAll(JobHandles.Span);
-
-        for (var index = 0; index < JobsCache.Count; index++)
-        {
-            var job = Unsafe.As>>(JobsCache[index]);
-            pool.Return(job);
-        }
-
-        JobHandles.Clear();
-        JobsCache.Clear();
-    }
+    InlineParallelChunkQuery(in description, innerJob);
 }
 """;
diff --git a/src/Arch/Arch.csproj b/src/Arch/Arch.csproj
index 19782d7..4fe9b38 100644
--- a/src/Arch/Arch.csproj
+++ b/src/Arch/Arch.csproj
@@ -22,7 +22,8 @@
 Fixed issue where Unsafe.As did not work on .Net 2.1 for the Dangerous-Utils.
 Dangerous API now allows setting/getting of recycled ids.
 Fixed archetype duplication after loading a save.
-Fixed .Add when a newly non registered component was added.
+Fixed .Add when a newly non registered component was added.
+Now makes use of the updated and improved JobScheduler 1.1.1.
 c#;.net;.net6;.net7;ecs;game;entity;gamedev; game-development; game-engine; entity-component-system;stride;unity;godot;
 https://github.com/genaray/Arch
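
For context: the scheduling loop that the four generated query variants used to emit inline now sits behind a single `InlineParallelChunkQuery` call. Below is a minimal sketch of what such a shared helper could look like, reconstructed from the removed template body; the method signature, the `IChunkJob` constraint, and the `ChunkIterationJob<T>` job type are assumptions for illustration and are not confirmed by this diff — only the body mirrors the removed code.

```csharp
// Sketch only -- reconstructed from the template body removed in this diff.
// Assumptions: the method lives in Arch's partial World class, T is constrained
// to an IChunkJob-style interface, and the pooled job type is ChunkIterationJob<T>.
// JobHandles and JobsCache are the reused list fields referenced by the old code.
public void InlineParallelChunkQuery<T>(in QueryDescription description, T innerJob)
    where T : struct, IChunkJob
{
    var pool = JobMeta<ChunkIterationJob<T>>.Pool;
    var query = Query(in description);
    foreach (var archetype in query.GetArchetypeIterator())
    {
        // Split this archetype's chunks into one range per processor.
        var archetypeSize = archetype.ChunkCount;
        var part = new RangePartitioner(Environment.ProcessorCount, archetypeSize);
        foreach (var range in part)
        {
            var job = pool.Get();
            job.Start = range.Start;
            job.Size = range.Length;
            job.Chunks = archetype.Chunks;
            job.Instance = innerJob;

            JobHandles.Add(SharedJobScheduler.Schedule(job));
            JobsCache.Add(job);
        }

        // Kick off the scheduled jobs and wait for all of them to finish.
        SharedJobScheduler.Flush();
        JobHandle.CompleteAll(JobHandles.Span);

        // Return the pooled jobs so the next archetype/query can reuse them.
        for (var index = 0; index < JobsCache.Count; index++)
        {
            pool.Return(Unsafe.As<ChunkIterationJob<T>>(JobsCache[index]));
        }

        JobHandles.Clear();
        JobsCache.Clear();
    }
}
```

With the body centralized like this, the four generated variants differ only in the inner job they construct, and scheduler-level changes (such as the JobScheduler 1.1.1 bump noted in the release notes) only have to be made in one place.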