/// <summary>
/// Non-synchronized access to a float value from multiple threads.
/// Each thread adds 1 to a shared total (initially zeroed) the specified number of repetitions.
/// Returns the total value (will be wrong on multi-CPU machines because the unsynchronized
/// updates race with each other).
/// RunTime contains the time required for this operation.
/// </summary>
/// <param name="threadCount">Number of worker threads; one job is queued per thread.</param>
/// <param name="repetitions">Number of increments each thread performs.</param>
/// <returns>The racy accumulated total; at most threadCount * repetitions.</returns>
public float SumValuesNoSync(int threadCount, int repetitions)
{
    BlockingThreadPool threadPool = new BlockingThreadPool(threadCount);
    BlockingThreadPool.JobBase[] works = new BlockingThreadPool.JobBase[threadCount];
    for (int t = 0; t < threadCount; ++t)
    {
        works[t] = new BlockingThreadPool.Job<int> { Param1 = repetitions, Execute = NoSyncWork };
    }

    _totalValue = 0;

    // Stopwatch is monotonic and high-resolution; DateTime.Now has coarse (~15 ms)
    // granularity and can jump when the system clock changes, skewing the measurement.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    threadPool.ExecuteJobs(works);
    RunTime = stopwatch.Elapsed.TotalSeconds;

    // Shut down the pool's worker threads; previously they were leaked.
    threadPool.Dispose();

    return _totalValue;
}
/// <summary>
/// Run a number of threads doing some repetitive operations.
/// Currently the job is calculating Fibonacci numbers and storing them in an array.
/// At the end the number of operations per second actually done across all threads is returned.
/// This allows to estimate the optimal number of threads to perform some calculation task.
/// </summary>
/// <param name="threadsCount">Number of worker threads; one job is queued per thread.</param>
/// <param name="repeatCount">Number of repetitions each thread performs.</param>
/// <returns>Aggregate repetitions per second over all threads.</returns>
public double MultithreadPerformance(int threadsCount, UInt64 repeatCount)
{
    BlockingThreadPool threadPool = new BlockingThreadPool(threadsCount);
    BlockingThreadPool.Job[] works = new BlockingThreadPool.Job[threadsCount];
    for (int w = 0; w < threadsCount; ++w)
    {
        // Each thread gets its own Work instance so no state is shared between jobs.
        Work work = new Work(repeatCount);
        works[w] = new BlockingThreadPool.Job { DoDelegate = work.Execute };
    }

    // Stopwatch is monotonic and high-resolution; DateTime.Now has coarse granularity
    // and can jump when the system clock changes, skewing the measurement.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    threadPool.ExecuteJobs(works);
    double workTime = stopwatch.Elapsed.TotalSeconds;

    double repPerSec = ((double)threadsCount * repeatCount) / workTime;
    // Fixed typos in the original message ("treads", "repeatitions/tread").
    Console.WriteLine(
        "MultithreadPerformance threads: {0}, repetitions/thread: {1:0,0}, work time: {2:0.000} s, rep/s: {3:0,0}",
        threadsCount, repeatCount, workTime, repPerSec);

    // Shut down the pool's worker threads; previously they were leaked.
    threadPool.Dispose();

    return repPerSec;
}
/// <summary>
/// Tears down the prefetch machinery: stops the retrieve pool without waiting for
/// in-flight work, disposes the frame enumerator, and clears both references.
/// Safe to call when already stopped (both fields null).
/// </summary>
private void InternalStop()
{
    // Capture-then-clear so the fields are nulled regardless of what Stop/Dispose do.
    var pool = _retrieveThreadPool;
    if (pool != null)
    {
        pool.Stop(false);
        _retrieveThreadPool = null;
    }

    var enumerator = _imageBoxEnumerator;
    if (enumerator != null)
    {
        enumerator.Dispose();
        _imageBoxEnumerator = null;
    }
}
/// <summary>
/// Starts the prefetch machinery: builds the frame enumerator from the configured
/// weightings and spins up the retrieve thread pool at below-normal priority.
/// A configured retrieve concurrency of zero disables prefetching entirely.
/// </summary>
private void InternalStart()
{
    int retrieveConcurrency = Prefetch.Default.RetrieveConcurrency;
    if (retrieveConcurrency == 0)
        return;

    // Weightings are clamped: selected at least 1, unselected at least 0.
    _imageBoxEnumerator = new ViewerFrameEnumerator(
        ImageViewer,
        Math.Max(Prefetch.Default.SelectedWeighting, 1),
        Math.Max(Prefetch.Default.UnselectedWeighting, 0),
        Prefetch.Default.ImageWindow);

    // Configure the pool up front via an object initializer, then start it.
    _retrieveThreadPool = new BlockingThreadPool<Frame>(_imageBoxEnumerator, RetrieveFrame)
    {
        ThreadPoolName = "Retrieve",
        Concurrency = retrieveConcurrency,
        ThreadPriority = ThreadPriority.BelowNormal
    };
    _retrieveThreadPool.Start();
}
/// <summary>
/// Computes the domination loss over all queries in the Dataset, in parallel.
/// Queries are split into contiguous chunks, each processed as one work item on the
/// BlockingThreadPool; per-chunk partial sums are folded into the total under a lock.
/// Per query the loss combines a softmax-style term for the best document and (half-weighted)
/// a term for the second-best document, using the max-shift trick for numeric stability.
/// </summary>
/// <param name="scores">Per-document scores, indexed by the document ids in Dataset.Boundaries.</param>
/// <returns>The summed loss over all queries with more than one document.</returns>
public double ComputeDominationLoss(double[] scores)
{
    int chunkSize = 1 + Dataset.NumQueries / BlockingThreadPool.NumThreads; // Minimizes the number of repeat computations in sparse array to have each thread take as big a chunk as possible
    double totalOutput = 0.0;
    // Guards totalOutput; each work item accumulates locally and merges once at the end.
    var _lock = new Object();
    for (int queryBegin = 0; queryBegin < Dataset.NumQueries; queryBegin += chunkSize)
    {
        // startQuery/endQuery are inclusive bounds passed as delegate arguments
        // (not captured from the loop variable), so each chunk sees its own range.
        BlockingThreadPool.RunOrBlock(delegate(int startQuery, int endQuery)
        {
            double output = 0.0;
            for (int query = startQuery; query <= endQuery; query++)
            {
                // Documents for this query occupy scores[begin..end).
                int begin = Dataset.Boundaries[query];
                int end = Dataset.Boundaries[query + 1];
                if (end - begin <= 1)
                {
                    // Single-document queries contribute no loss.
                    continue;
                }
                int bestDoc = _bestDocsPerQuery.BestDocs[query];
                int secondBestDoc = _bestDocsPerQuery.SecondBestDocs[query];
                double bestDocScore = scores[bestDoc];
                double secondBestDocScore = scores[secondBestDoc];
                // find max score (overall, and excluding the best doc) for the
                // log-sum-exp shift that prevents Math.Exp from overflowing.
                double max = double.NegativeInfinity;
                double maxNotBest = double.NegativeInfinity;
                for (int d = begin; d < end; ++d)
                {
                    if (max < scores[d])
                    {
                        max = scores[d];
                    }
                    if (d != bestDoc && maxNotBest < scores[d])
                    {
                        maxNotBest = scores[d];
                    }
                }
                // sum of exponents and sum of all but best
                double sum = 0.0;
                double sumAllButBest = 0.0;
                for (int d = begin; d < end; ++d)
                {
                    sum += Math.Exp(scores[d] - max);
                    if (d != bestDoc)
                    {
                        sumAllButBest += Math.Exp(scores[d] - maxNotBest);
                    }
                }
                // -log softmax(bestDoc) plus half-weighted -log softmax(secondBestDoc among non-best),
                // each expanded with its max shift: (max - score + log(sum of shifted exps)).
                output += max - bestDocScore + Math.Log(sum)
                        + 0.5 * (maxNotBest - secondBestDocScore + Math.Log(sumAllButBest));
            }
            // Merge this chunk's partial sum into the shared total.
            lock (_lock)
            {
                totalOutput += output;
            }
        }, queryBegin, Math.Min(queryBegin + chunkSize - 1, Dataset.NumQueries - 1));
    }
    // Wait for every queued chunk before reading totalOutput.
    BlockingThreadPool.BlockUntilAllWorkItemsFinish();
    return(totalOutput);
}
/// <summary>
/// Make segments of different sizes, dividing the array in the following proportion: 1, 2, 4, etc.
/// This allows to test if the thread pool uses free threads to process pending jobs.
/// Due to different segment sizes the better performance in this test will be reached with the
/// greater number of segments, because idle time of threads will be lower.
/// </summary>
/// <param name="size">Total number of bytes in the array to fill.</param>
/// <param name="numThreads">Worker threads in the pool.</param>
/// <param name="numSegments">Jobs per repetition; segment w is filled with value w.</param>
/// <param name="repetitions">How many times the whole array is refilled.</param>
void FillArray(int size, int numThreads, int numSegments, int repetitions)
{
    BlockingThreadPool pool = new BlockingThreadPool(numThreads);
    _array = new byte[size];

    // Segment s gets about 2^s * factor bytes; factor * (2^numSegments - 1) == size,
    // so sizes roughly double from one segment to the next.
    int[] segmentSizes = new int[numSegments];
    int curSize = 0;
    double factor = size / (Math.Pow(2, numSegments) - 1);
    for (int s = 0; s < numSegments - 1; ++s)
    {
        segmentSizes[s] = (int)(Math.Pow(2, s) * factor);
        curSize += segmentSizes[s];
    }
    // Last segment absorbs the rounding remainder so the sizes sum exactly to 'size'.
    segmentSizes[numSegments - 1] = size - curSize;

    // Stopwatch is monotonic and high-resolution; DateTime.Now is too coarse for benchmarks.
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    int begin;
    for (int r = 0; r < repetitions; ++r)
    {
        BlockingThreadPool.JobBase[] jobs = new BlockingThreadPool.JobBase[numSegments];
        begin = 0;
        for (int w = 0; w < numSegments; ++w)
        {
            Parameters p = new Parameters { Start = begin, Count = segmentSizes[w], Value = w };
            if (w == numSegments - 1)
            {
                // Defensive: make the final job cover everything up to the end of the array.
                p.Count = size - p.Start;
            }
            jobs[w] = new BlockingThreadPool.Job<Parameters> { Param1 = p, Execute = FillArrayThreadFunc };
            begin += p.Count;
        }
        pool.ExecuteJobs(jobs);
    }
    double time = stopwatch.Elapsed.TotalMilliseconds;
    Console.WriteLine("Array size: {0:0,0}, threads: {1,3}, segments: {2,3}, repeats: {3,3}, time: {4:0.0} ms, el/s: {5:#,#}",
        size, numThreads, numSegments, repetitions, time, (double)size * repetitions / (time * 0.001));

    // Now verify values; dispose first so all worker threads have finished with _array.
    pool.Dispose();
    begin = 0;
    for (int s = 0; s < numSegments; ++s)
    {
        int end = begin + segmentSizes[s];
        for (int i = begin; i < end; ++i)
        {
            // Check before assert, it is much faster.
            if (s != _array[i])
            {
                Assert.AreEqual(s, _array[i]);
            }
        }
        begin = end;
    }
}