/// <summary>
/// Starts a number of parallel tasks.
/// </summary>
/// <param name="factory">Factory that creates one rewriter task per chunk.</param>
/// <returns>A tuple of (elapsed milliseconds, total RTL cluster count).</returns>
public (long, int) DoIt(RewriterTaskFactory factory)
{
    // Partition the work unit into chunk-sized rewriter tasks.
    var tasks = new List<RewriterTask>();
    int start = 0;
    while (start < workUnit.Length)
    {
        tasks.Add(factory.Create(workUnit, start, start + chunkSize));
        start += chunkSize;
    }
    var taskResults = new TaskResult[tasks.Count];
#if !NO_PARALLEL_THREADS
    // Run all tasks concurrently; the long loop index identifies each result slot.
    var elapsedMsec = Time(() =>
        Parallel.ForEach(tasks, (task, _, index) =>
        {
            taskResults[index] = task.Run();
        }));
#else
    // Sequential fallback when parallelism is compiled out.
    var elapsedMsec = Time(() =>
    {
        for (int i = 0; i < tasks.Count; ++i)
        {
            taskResults[i] = tasks[i].Run();
        }
    });
#endif
    // NOTE(review): assumes Clusters is non-null for every completed task — confirm.
    return (elapsedMsec, taskResults.Sum(r => r.Clusters!.Length));
}
/// <summary>
/// Benchmarks every scanning strategy against a single architecture.
/// </summary>
/// <param name="cfg">Reko configuration used to build the work unit.</param>
/// <param name="archDef">Architecture to load and exercise.</param>
private static void TestArchitecture(RekoConfigurationService cfg, ArchitectureDefinition archDef)
{
    // Announce which architecture is being benchmarked.
    Console.Out.WriteLine("= Testing {0} ============", archDef.Description);
    var work = MakeWorkUnit(cfg, archDef, 42);
    if (work is null)
    {
        // Architecture could not be loaded; skip it rather than crash the run.
        Console.Out.WriteLine("*** Failed to load {0}", archDef.Name);
        return;
    }
    // Benchmark each task-creation strategy against the same work unit.
    RewriterTaskFactory[] factories =
    {
        new LinearTaskFactory(),
        new ShingleTaskFactory(),
        new LinearShingleTaskFactory(),
    };
    foreach (var factory in factories)
    {
        CollectStatistics(work, factory);
    }
}
/// <summary>
/// Times repeated scans of <paramref name="work"/> across a sweep of chunk sizes
/// and prints per-repetition and averaged statistics.
/// </summary>
/// <param name="work">Work unit to scan.</param>
/// <param name="factory">Strategy used to create rewriter tasks.</param>
private static void CollectStatistics(WorkUnit work, RewriterTaskFactory factory)
{
    // Sweep chunk sizes geometrically (x16) from MinChunkSize up to MemorySize.
    for (int chunkSize = MinChunkSize; chunkSize <= MemorySize; chunkSize *= 16)
    {
        Console.Out.WriteLine(" {0}, chunk size {1}", factory.Name, chunkSize);
        long totalMsec = 0;
        long totalClusters = 0;
        for (int rep = 0; rep < CReps; ++rep)
        {
            var scanner = new ChunkScanner(work, chunkSize);
            var (msec, clusters) = scanner.DoIt(factory);
            totalMsec += msec;
            totalClusters += clusters;
            Console.Out.Write(" {0,7}", msec);
            Console.Out.Flush();
            // Collect between reps so leftover garbage doesn't skew the next timing.
            GC.Collect();
        }
        var avg = totalMsec / (double)CReps;
        // Microseconds per RTL cluster: msec * 1000 / clusters.
        var perCluster = totalMsec * 1000.0 / totalClusters;
        Console.Out.WriteLine(", avg: {0:0.000} msec; {1:0.000} usec/rtl cluster {2,6} clusters;",
            avg, perCluster, totalClusters);
    }
}