Example #1
 // Each element's selector blocks on a Barrier sized to 'degree', so Sum can only
 // complete if the query really runs on 'degree' concurrent worker threads.
 public static void DegreeOfParallelism_Barrier(Labeled<ParallelQuery<int>> labeled, int count, int degree)
 {
     using (ThreadPoolHelpers.EnsureMinThreadsAtLeast(degree))
     {
         var barrier = new Barrier(degree);
         Assert.Equal(Functions.SumRange(0, count), labeled.Item.WithDegreeOfParallelism(degree).Sum(x => { barrier.SignalAndWait(); return x; }));
     }
 }
Example #2
 // Negates the source range and orders the results; streaming them through foreach
 // verifies that the ordered output is pipelined back as consecutive values starting at 1 - count.
 public static void DegreeOfParallelism_Pipelining(Labeled<ParallelQuery<int>> labeled, int count, int degree)
 {
     using (ThreadPoolHelpers.EnsureMinThreadsAtLeast(degree))
     {
         int expected = 1 - count;
         foreach (int result in labeled.Item.WithDegreeOfParallelism(degree).Select(x => -x).OrderBy(x => x))
         {
             Assert.Equal(expected++, result);
         }
     }
 }
Example #3
 // Each selector spins briefly to throttle producers before yielding; the ordered
 // output must still equal the range from 1 - count through 0.
 public static void DegreeOfParallelism_Throttled_Pipelining(Labeled<ParallelQuery<int>> labeled, int count, int degree)
 {
     using (ThreadPoolHelpers.EnsureMinThreadsAtLeast(degree))
     {
         Assert.True(labeled.Item.WithDegreeOfParallelism(degree).Select(x =>
         {
             var sw = new SpinWait();
             while (!sw.NextSpinWillYield)
             {
                 sw.SpinOnce();                           // brief spin to wait a small amount of time
             }
             return -x;
         }).OrderBy(x => x).SequenceEqual(ParallelEnumerable.Range(1 - count, count).AsOrdered()));
     }
 }
Example #4
 static TaskRunSyncTests()
 {
     // Tests that create tasks which need to run concurrently require us to bump up the number
     // of threads in the pool, or else we need to wait for it to grow dynamically to the desired number
     ThreadPoolHelpers.EnsureMinThreadsAtLeast(10);
 }
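
All four examples depend on ThreadPoolHelpers.EnsureMinThreadsAtLeast to guarantee that the thread pool already has enough worker threads before the query or tasks start, instead of waiting for the pool to grow on demand. The helper's implementation is not shown above; the sketch below is an assumption about how such a helper could look, based on the using statements in Examples #1-#3 (it returns an IDisposable) and on the public ThreadPool.GetMinThreads/SetMinThreads API. The nested ThreadCountReset type is a hypothetical name introduced here for illustration.

 using System;
 using System.Threading;

 internal static class ThreadPoolHelpers
 {
     // Raise the pool's minimum worker-thread count if it is below the requested value,
     // and hand back a disposable that restores the original minimums.
     internal static IDisposable EnsureMinThreadsAtLeast(int minWorkerThreads)
     {
         ThreadPool.GetMinThreads(out int workerThreads, out int completionPortThreads);
         if (workerThreads < minWorkerThreads)
         {
             ThreadPool.SetMinThreads(minWorkerThreads, completionPortThreads);
         }
         return new ThreadCountReset(workerThreads, completionPortThreads);
     }

     // Hypothetical reset type: Dispose puts the previous minimums back.
     private sealed class ThreadCountReset : IDisposable
     {
         private readonly int _workerThreads;
         private readonly int _completionPortThreads;

         internal ThreadCountReset(int workerThreads, int completionPortThreads)
         {
             _workerThreads = workerThreads;
             _completionPortThreads = completionPortThreads;
         }

         public void Dispose() => ThreadPool.SetMinThreads(_workerThreads, _completionPortThreads);
     }
 }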