Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
103 changes: 44 additions & 59 deletions JobScheduler.Benchmarks/Benchmark.cs
Original file line number Diff line number Diff line change
@@ -1,9 +1,5 @@
using System.Diagnostics;
using System.Numerics;
using BenchmarkDotNet.Jobs;
using BenchmarkDotNet.Toolchains.CsProj;
using BenchmarkDotNet.Toolchains.InProcess.NoEmit;
using CommunityToolkit.HighPerformance;
using Schedulers;
using Schedulers.Benchmarks;
using Schedulers.Utils;
Expand All @@ -14,7 +10,6 @@ public struct EmptyJob : IJob
{
public void Execute()
{

}
}

Expand Down Expand Up @@ -69,7 +64,7 @@ public HeavyCalculationJob(int first, int second)

public void Execute()
{
for (var i = 0; i < 100; i++)
for (var i = 0; i < 10; i++)
{
_first = double.Sqrt(_second);
_second = double.Sqrt(_first) + 1;
Expand All @@ -86,7 +81,7 @@ public void RunVectorized(int index, int end)

public void RunSingle(int index)
{
throw new NotImplementedException();
Execute();
}
}

Expand All @@ -105,7 +100,7 @@ public void RunVectorized(int index, int end)

public void RunSingle(int index)
{
if (!acceptsNewEntries) throw new("Should not accept new entries");
if (!acceptsNewEntries) throw new($"Should not accept new entries {index}");
var newValue = Interlocked.Increment(ref total);
// Console.WriteLine($" {index} {newValue}");
}
Expand All @@ -131,24 +126,26 @@ public long End(int jobs, string type)

public class Benchmark
{
private const int jobCount = 200000;
private const int loopCount = 100;
private const int jobCount = 200;
private const int loopCount = 100000;

private static void CorrectnessTestJob()
{
using var jobScheduler = new JobScheduler();
var timer = new JobTimer();
for (var sindex = 0; sindex < loopCount; sindex++)
{
TestCorrectnessJob.total = 0;
TestCorrectnessJob.acceptsNewEntries = true;
var job = new ParallelJobProducer<TestCorrectnessJob>(jobCount, new(), jobScheduler);
jobScheduler.Wait(job.GetHandle());
TestCorrectnessJob.acceptsNewEntries = false;
var expected = jobCount;
if (TestCorrectnessJob.total != expected)
var job = new ParallelJobProducer<TestCorrectnessJob>(0, jobCount, new());
ParallelForJobCommon.GlobalScheduler.Flush(job.GetHandle());
ParallelForJobCommon.GlobalScheduler.Wait(job.GetHandle());
// Thread.Sleep(1);
// Console.WriteLine($"UnfinishedJobs {job.GetHandle().UnfinishedJobs} total {TestCorrectnessJob.total}");
// TestCorrectnessJob.acceptsNewEntries = false;
var total = TestCorrectnessJob.total;
if (total != jobCount)
{
throw new($"{TestCorrectnessJob.total} != {expected}");
throw new($"{total} != {jobCount}");
}
}

Expand All @@ -166,8 +163,7 @@ private static void BenchB()
{
var job = new HeavyCalculationJob(index, index);
var handle = jobScheduler.Schedule(job);
handle.Parent = parentHandle.Index;
handle.SetDependsOn(parentHandle);
handle.SetParent(parentHandle);
jobScheduler.Flush(handle);
}

Expand All @@ -178,50 +174,31 @@ private static void BenchB()
timer.End(jobCount * loopCount, "Every calculation job is its own handle");
}

private static void BenchC()
{
using var jobScheduler = new JobScheduler();
var timer = new JobTimer();
for (var sindex = 0; sindex < loopCount; sindex++)
{
var job = new ParallelJobProducer<HeavyCalculationJob>(jobCount, new(), jobScheduler);
jobScheduler.Wait(job.GetHandle());
}

timer.End(jobCount * loopCount, "ParallelJobProducer");
}

private static void BenchD()
{
var timer = new JobTimer();
for (var sindex = 0; sindex < loopCount; sindex++)
{
Parallel.For(0, jobCount, i =>
{
var job = new HeavyCalculationJob(i, i);
job.Execute();
});
}

timer.End(jobCount * loopCount, "Just Parallel.For");
}

private static long BenchVector(bool dontUseVector)
private static long BenchVector(bool useVector)
{
using var jobScheduler = new JobScheduler();
var timer = new JobTimer();
var data = new VectorCalculationJob { a = new float[jobCount], b = new float[jobCount], result = new float[jobCount], Repetitions = 500 };
var parentJob = ParallelForJobCommon.GlobalScheduler.Schedule();
for (var sindex = 0; sindex < loopCount; sindex++)
{
var job = new ParallelJobProducer<VectorCalculationJob>(jobCount, data, jobScheduler, 16, !dontUseVector);
jobScheduler.Wait(job.GetHandle());
var job = new ParallelJobProducer<VectorCalculationJob>(0, jobCount, data, loopSize: 16, onlySingle: !useVector);
job.GetHandle().SetParent(parentJob);
ParallelForJobCommon.GlobalScheduler.Flush(job.GetHandle());
}

return timer.End(jobCount * loopCount, $"Use vector: {!dontUseVector}");
ParallelForJobCommon.GlobalScheduler.Flush(parentJob);
ParallelForJobCommon.GlobalScheduler.Wait(parentJob);
return timer.End(jobCount * loopCount, $"Use vector: {useVector}");
}

private static void Main(string[] args)
{
ParallelJobBenchmark.Benchmark();
return;
ParallelForJobCommon.SetScheduler(new());
// new JobHierarchyTest();
// ParallelForJobCommon.DisposeScheduler();
// return;

// var config = DefaultConfig.Instance.AddJob(Job.Default
// .WithWarmupCount(2)
// .WithMinIterationCount(10)
Expand All @@ -232,16 +209,24 @@ private static void Main(string[] args)
// config = config.WithOptions(ConfigOptions.DisableOptimizationsValidator);
// BenchmarkRunner.Run<JobSchedulerBenchmark>(config);
// return;
for (var i = 0;; i++)
var continiousRatio = 0d;
for (var i = 0; i < 200000; i++)
{
// CorrectnessTestJob();
// BenchB();
// BenchC();
// BenchD();
var vectorized = BenchVector(true);
var nonVectorized = BenchVector(false);
Console.WriteLine($"Ratio {(double)nonVectorized / vectorized}");
// var vectorized = BenchVector(true);
// var nonVectorized = BenchVector(false);
// var ratio = (double)nonVectorized / vectorized;
// Console.WriteLine($"Ratio {ratio}");
// continiousRatio += ratio;
// if (i % 10 == 0)
{
Console.WriteLine($"Continious ratio: {continiousRatio / (i + 1)}");
}
Thread.Sleep(1);
}
ParallelForJobCommon.DisposeScheduler();

//using var jobScheduler = new JobScheduler();

// Spawn massive jobs and wait for finish
Expand Down
109 changes: 109 additions & 0 deletions JobScheduler.Benchmarks/JobHierarchyTest.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
using Arch.Benchmarks;

namespace Schedulers.Benchmarks;

/// <summary>
/// Stress test that builds deep parent/child job hierarchies on a
/// <see cref="JobScheduler"/> and verifies that every leaf job executed
/// exactly once. Throws if the observed execution count deviates.
/// </summary>
public class JobHierarchyTest
{
    /// <summary>Leaf payload: atomically counts how many times it ran.</summary>
    private class CountingJob : IJob
    {
        public void Execute()
        {
            Interlocked.Increment(ref s_completed);
        }
    }

    // Hierarchy shape: _depth layers, each node spawning _fanOut children.
    private int _depth = 6;
    private int _fanOut = 10;
    // Handles created during the current run (reset per test).
    private int _scheduled = 0;
    // Leaf executions; static so the nested CountingJob can reach it.
    private static int s_completed = 0;
    private JobScheduler _scheduler = new();
    private CountingJob _leafJob = new();

    public JobHierarchyTest()
    {
        // Inverted variant kept around for manual experiments:
        // InvertedPyramidTest();
        // InvertedPyramidTest();
        // InvertedPyramidTest();
        // InvertedPyramidTest();
        var iterations = 10;
        for (var run = 0; run < iterations; run++)
        {
            PyramidTest();
        }

        _scheduler.Dispose();
    }

    /// <summary>
    /// Builds a pyramid under a single root handle, waits for it, and checks
    /// that exactly fanOut^depth leaf jobs completed.
    /// </summary>
    private void PyramidTest()
    {
        _scheduled = 0;
        s_completed = 0;
        var timer = new JobTimer();
        var root = _scheduler.Schedule();
        GrowPyramid(root, 0);
        _scheduler.Flush(root);
        _scheduler.Wait(root);
        timer.End(_scheduled, "PyramidJobs test");
        var expected = (int)Math.Pow(_fanOut, _depth);
        Console.WriteLine($"Total jobs scheduled: {_scheduled}, total passed jobs: {s_completed}");
        if (s_completed != expected)
        {
            throw new($"Total passed jobs {s_completed} does not match expected {expected}.");
        }
    }

    /// <summary>
    /// Builds the hierarchy bottom-up via explicit dependencies: every node
    /// depends on its source and parents itself to one shared top handle,
    /// then waits on that top handle. Expects fanOut^(depth-1) leaf runs.
    /// </summary>
    private void InvertedPyramidTest()
    {
        _scheduled = 0;
        s_completed = 0;
        var timer = new JobTimer();
        var bottom = _scheduler.Schedule();
        var top = _scheduler.Schedule();
        GrowInvertedPyramid(bottom, 1, top);
        _scheduler.Flush(bottom);
        _scheduler.Flush(top);
        _scheduler.Wait(top);
        timer.End(_scheduled, "InvertedPyramidJobs test");
        var expected = (int)Math.Pow(_fanOut, _depth - 1);
        Console.WriteLine($"Total jobs scheduled: {_scheduled}, total passed jobs: {s_completed}");
        if (s_completed != expected)
        {
            throw new($"Total passed jobs {s_completed} does not match expected {expected}.");
        }
    }

    /// <summary>
    /// Recursively schedules _fanOut children under <paramref name="parent"/>.
    /// Only nodes on the last layer carry the counting payload; inner nodes
    /// are payload-less grouping handles. Each child is flushed after its own
    /// subtree has been attached.
    /// </summary>
    private void GrowPyramid(JobHandle parent, int layer)
    {
        if (layer >= _depth)
        {
            return;
        }

        // Loop-invariant: whether children created at this layer are leaves.
        var isLeafLayer = layer + 1 >= _depth;
        for (var i = 0; i < _fanOut; i++)
        {
            var child = _scheduler.Schedule(isLeafLayer ? _leafJob : null, parent);
            GrowPyramid(child, layer + 1);
            _scheduler.Flush(child);
            _scheduled++;
        }
    }

    /// <summary>
    /// Recursively schedules _fanOut targets that depend on
    /// <paramref name="source"/> and report completion to the shared
    /// <paramref name="top"/> handle. Leaf-layer targets carry the payload.
    /// </summary>
    private void GrowInvertedPyramid(JobHandle source, int layer, JobHandle top)
    {
        if (layer >= _depth)
        {
            return;
        }

        // Loop-invariant: whether targets created at this layer are leaves.
        var isLeafLayer = layer + 1 == _depth;
        for (var i = 0; i < _fanOut; i++)
        {
            var target = _scheduler.Schedule(isLeafLayer ? _leafJob : null);
            target.SetDependsOn(source);
            target.SetParent(top);
            GrowInvertedPyramid(target, layer + 1, top);
            _scheduler.Flush(target);
            _scheduled++;
        }
    }
}
8 changes: 4 additions & 4 deletions JobScheduler.Benchmarks/JobSchedulerBenchmark.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
using Arch.Benchmarks;
using CommunityToolkit.HighPerformance;
using CommunityToolkit.HighPerformance;
using Schedulers.Utils;

namespace Schedulers.Benchmarks;
Expand All @@ -9,6 +8,7 @@ public struct CalculationJob : IJob
private int _first;
private int _second;
public static int _result;

public CalculationJob(int first, int second)
{
_first = first;
Expand All @@ -35,8 +35,8 @@ public class JobSchedulerBenchmark
[IterationSetup]
public void Setup()
{
_jobScheduler = new JobScheduler();
_jobHandles = new List<JobHandle>(Jobs);
_jobScheduler = new();
_jobHandles = new(Jobs);
}

[IterationCleanup]
Expand Down
Loading