/// <summary>
/// Runs <paramref name="count"/> simulation trials one after another and folds
/// each result into a histogram (the sequential baseline for the sample).
/// </summary>
/// <param name="count">Number of trials to run.</param>
/// <param name="mean">Mean passed through to each simulation trial.</param>
/// <param name="stdDev">Standard deviation passed through to each simulation trial.</param>
/// <returns>Histogram of simulation results, bucketed by BucketSize.</returns>
static int[] DoSequentialAggregation(int count, double mean, double stdDev)
{
    var random = new Random(SampleUtilities.MakeRandomSeed());
    var buckets = MakeEmptyHistogram();

    for (int trial = 0; trial < count; trial++)
    {
        // Draw the next input value; only positive samples are simulated.
        double sample = random.NextDouble();
        if (sample <= 0.0)
        {
            continue;
        }

        // MAP: perform a simulation trial for the sample value.
        double outcome = DoSimulation(sample, mean, stdDev);

        // REDUCE: merge the trial's outcome into the histogram, ignoring
        // results that fall outside the table.
        var bucket = (int)Math.Floor(outcome / BucketSize);
        if (bucket >= 0 && bucket < TableSize)
        {
            buckets[bucket] += 1;
        }
    }

    return buckets;
}
/// <summary>
/// Entry point for the futures samples: times the sequential baseline against
/// several futures-pattern variants.
/// </summary>
/// <remarks>
/// For consistent timing results, run without the debugger. Watch CPU usage in
/// the task manager: on a multicore machine the sequential version uses less
/// CPU but executes more slowly than the parallel versions.
/// </remarks>
static void Main()
{
    Console.WriteLine("Basic Futures Samples\n");
#if (DEBUG)
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    Console.WriteLine("Starting...");

    // Timed comparison between sequential and two ways of using the futures pattern.
    SampleUtilities.TimedRun(Example1, "Sequential");
    SampleUtilities.TimedRun(Example2, "Parallel, using F1 future");
    SampleUtilities.TimedRun(Example3, "Parallel, using F2/F3 future");

    // Additional variants, for comparison only.
    Console.WriteLine();
    Console.WriteLine("Other variants, for comparison--");
    SampleUtilities.TimedRun(Example4, "Parallel, using F2 future and F3 continuation");
    SampleUtilities.TimedRun(Example5, "Parallel, using F1 and F2/F3 future");
    SampleUtilities.TimedRun(Example6, "Parallel, with try/catch block");

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadLine();
}
/// <summary>
/// Times one parallel-for example and optionally verifies its output, reporting
/// any failure instead of letting it propagate.
/// </summary>
/// <param name="action">The example to run; returns the computed result array.</param>
/// <param name="label">Label used in timing output and failure messages.</param>
void RunParallelForExample(Func<double[]> action, string label)
{
    // Collect before timing so garbage from the previous run doesn't skew results.
    GC.Collect();

    try
    {
        double[] result = null;
        SampleUtilities.TimedAction(() => { result = action(); }, " " + label);

        if (VerifyResult)
        {
            for (int step = 0; step < NumberOfSteps; step++)
            {
                EnsureResult(result[step], step);
            }
        }
    }
    catch (AggregateException ae)
    {
        // Report every inner exception; returning true marks each as handled.
        ae.Handle(e =>
        {
            Console.WriteLine(" {0}: Failed with {1}", label, e.GetType().ToString());
            return true;
        });
    }
    catch (ParallelForExampleException ex)
    {
        // Demonstrates that exceptions can escape without being aggregated.
        Console.WriteLine(" {0}: Failed with unaggregated {1} ", label, ex.GetType().ToString());
    }
    catch (Exception ex)
    {
        Console.WriteLine(" {0}: Failed with unaggregated {1} ", label, ex.GetType().ToString());
    }
}
/// <summary>
/// Drives the aggregation samples: times each summation/aggregation variant over
/// the same input sequence.
/// </summary>
static void MainTask()
{
    Console.WriteLine("Basic Aggregation Samples\n");
#if DEBUG
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    var sequence = SampleUtilities.Range(SequenceSize);

    // Time each variant, then collect so GC pauses don't bleed into the next run.
    Action<Action, string> timeThenCollect = (work, label) =>
    {
        SampleUtilities.TimedAction(work, label);
        GC.Collect();
    };

    timeThenCollect(() => Chapter4Sample01Sequential(sequence), "calculate sum, sequential for loop");
    timeThenCollect(() => Chapter4Sample01IncorrectParallel(sequence), "calculate sum, incorrectly coded parallel loop");
    timeThenCollect(() => Chapter4Sample02Linq(sequence), "calculate sum, LINQ (sequential)");
    timeThenCollect(() => Chapter4Sample02Plinq(sequence), "calculate sum, PLINQ (parallel)");
    timeThenCollect(() => Chapter4Sample03Plinq(sequence), "custom aggregation (product) PLINQ (parallel)");
    timeThenCollect(() => Chapter4Sample01Parallel(sequence), "calculate sum, parallel for each");
    timeThenCollect(() => Chapter4Sample01ParallelPartitions(sequence), "calculate sum, parallel partitions");

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadLine();
}
// P(left child node exists), P(right child node exists) for interior nodes
const double TreeDensity = 0.75;

/// <summary>
/// Drives the dynamic task samples: builds one random tree and times each
/// traversal strategy over it.
/// </summary>
static void MainTask()
{
    Console.WriteLine("Basic Dynamic Task Samples\n");
#if (DEBUG)
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    Console.WriteLine("Tree Walking");

    // All strategies traverse the same tree so timings are comparable.
    var tree = MakeTree(TreeSize, TreeDensity);

    SampleUtilities.TimedAction(() => Chapter6Example1Sequential(tree), "tree traversal, sequential");
    SampleUtilities.TimedAction(() => Chapter6Example1Parallel(tree), "tree traversal, parallel");
    SampleUtilities.TimedAction(() => Chapter6Example1Parallel2(tree), "tree traversal, parallel - attached to parent");
    SampleUtilities.TimedAction(() => Chapter6Example01ParallelWhileNotEmpty(tree), "parallel while not empty - Parallel.ForEach");
    SampleUtilities.TimedAction(() => Chapter6Example01ParallelWhileNotEmpty2(tree), "parallel while not empty - parallel tasks");

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadKey();
}
/// <summary>
/// Inserts Gaussian noise into a bitmap.
/// </summary>
/// <param name="source">Bitmap to be processed</param>
/// <param name="amount">Standard deviation of perturbation for each color channel.</param>
/// <returns>New, speckled bitmap</returns>
/// <remarks>
/// This code uses Bitmap.GetPixel and SetPixel methods for clarity. An implementation using Bitmap.LockBits
/// and then directly modifying the image data may be faster, especially for large images.
/// </remarks>
public static Bitmap AddNoise(this Bitmap source, double amount)
{
    if (source == null)
    {
        // nameof keeps the parameter name correct under rename refactorings.
        throw new ArgumentNullException(nameof(source));
    }

    Bitmap bitmap = null;
    Bitmap tempBitmap = null;
    try
    {
        var generator = new GaussianRandom(0.0, amount, SampleUtilities.MakeRandomSeed());
        tempBitmap = new Bitmap(source.Width, source.Height);
        for (int y = 0; y < tempBitmap.Height; y++)
        {
            for (int x = 0; x < tempBitmap.Width; x++)
            {
                var pixel = source.GetPixel(x, y);
                Color newPixel = AddPixelNoise(pixel, generator);
                tempBitmap.SetPixel(x, y, newPixel);
            }
        }
        // Hand ownership to the return value; the finally block then sees null
        // and does not dispose the bitmap we are returning.
        bitmap = tempBitmap;
        tempBitmap = null;
    }
    finally
    {
        // Dispose the partial result only if an exception escaped before the handoff.
        if (tempBitmap != null)
        {
            tempBitmap.Dispose();
        }
    }
    return bitmap;
}
/// <summary>
/// Creates a page template for the given framework, drops the widget on it,
/// then creates a page from that template.
/// </summary>
/// <param name="framework">Hybrid or pure MVC template framework.</param>
/// <param name="widgetController">Controller backing the widget placed on the template.</param>
/// <param name="pageId">Receives the id of the created page.</param>
/// <param name="pageUrl">Receives the absolute URL of the created page.</param>
/// <returns>The id of the created template.</returns>
private Guid CreateTemplateWithWidgetAndBasePageOnIt(PageTemplateFramework framework, Controller widgetController, out Guid pageId, out string pageUrl)
{
    var pageManager = PageManager.GetManager();

    // Create the template with a unique title for the requested framework.
    var uniqueTitle = FrontendModuleFilterTests.TemplateTitle + Guid.NewGuid().ToString("N");
    Guid templateId;
    if (framework == PageTemplateFramework.Hybrid)
    {
        templateId = ServerOperations.Templates().CreateHybridMVCPageTemplate(uniqueTitle);
    }
    else
    {
        templateId = ServerOperations.Templates().CreatePureMVCPageTemplate(uniqueTitle);
    }

    // Place the widget on the template.
    var proxy = new MvcControllerProxy
    {
        ControllerName = widgetController.GetType().FullName,
        Settings = new ControllerSettings(widgetController)
    };
    SampleUtilities.AddControlToTemplate(templateId, proxy, "Body", "FrontendModuleFilterTestsWidgetCaption");

    // Create a page based on the template and resolve its absolute URL.
    var template = pageManager.GetTemplates().Where(t => t.Id == templateId).SingleOrDefault();
    pageId = FeatherServerOperations.Pages().CreatePageWithTemplate(template, "TestPageName", "test-page-url");
    var pageNode = pageManager.GetPageNode(pageId);
    pageUrl = RouteHelper.GetAbsoluteUrl(pageNode.GetFullUrl());

    return templateId;
}
/// <summary>
/// Entry point for the sort sample. Command line arguments:
/// length - of array to sort;
/// threshold - array length to use InsertionSort instead of SequentialQuickSort.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("Sort Sample\n");
#if DEBUG
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    int length = 40000000;  // default
    int seed = 1;           // seed for reproducible runs

    if (args.Length > 0)
    {
        length = Int32.Parse(args[0], CultureInfo.CurrentCulture);
    }
    if (args.Length > 1)
    {
        Sort.Threshold = Int32.Parse(args[1], CultureInfo.CurrentCulture);
    }
    Console.WriteLine();

    var data = MakeArray(length, seed);
    PrintElements(data, 8);
    SampleUtilities.TimedRun(() =>
    {
        Sort.SequentialQuickSort(data);
        return data.Length;
    }, " Sequential QuickSort");
    PrintElements(data, 8);
    Console.WriteLine();

    // Rebuild from the same seed so both runs sort identical input.
    data = MakeArray(length, seed);
    PrintElements(data, 8);
    SampleUtilities.TimedRun(() =>
    {
        Sort.ParallelQuickSort(data);
        return data.Length;
    }, " Parallel QuickSort");
    PrintElements(data, 8);

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadKey();
}
/// <summary>
/// Starts the CPU-intensive work on a new task and returns it as a future.
/// </summary>
/// <returns>A task that completes with the value 42 after the work finishes.</returns>
public Task<int> Start()
{
    Func<int> work = () =>
    {
        SampleUtilities.DoCpuIntensiveOperation(2.0);
        return 42;
    };
    return Task<int>.Factory.StartNew(work);
}
/// <summary>
/// Places a grid control with the given layout into a placeholder of the template.
/// </summary>
/// <param name="pageId">Id of the template to add the control to.</param>
/// <param name="controlPath">Virtual path of the grid layout.</param>
/// <param name="placeHolder">Placeholder name to add the control to.</param>
/// <param name="caption">Caption for the added control.</param>
private void AddGridControlToPageTemplate(Guid pageId, string controlPath, string placeHolder, string caption)
{
    var gridControl = new GridControl { Layout = controlPath };
    SampleUtilities.AddControlToTemplate(pageId, gridControl, placeHolder, caption);
}
/// <summary>
/// Top-level loop for the image pipeline sample: enumerates image files in the
/// current directory and processes them with the selected algorithm, invoking
/// <paramref name="displayFn"/> for each finished image in sequence order.
/// </summary>
/// <param name="displayFn">Called once per processed image; wrapped so out-of-order delivery throws.</param>
/// <param name="token">External (user-requested) cancellation token; linked into the internal source.</param>
/// <param name="algorithmChoice">0 = sequential, 1 = pipelined, 2 = load-balanced pipeline; anything else throws.</param>
/// <param name="errorFn">Receives the failure: the lone inner exception when an AggregateException has exactly one, otherwise the exception itself.</param>
public static void ImagePipelineMainLoop(Action <ImageInfo> displayFn, CancellationToken token, int algorithmChoice, Action <Exception> errorFn)
{
    try
    {
        string sourceDir = Directory.GetCurrentDirectory();

        // Ensure that frames are presented in sequence before invoking the user-provided display function.
        // NOTE(review): imagesSoFar is captured and incremented without synchronization; this assumes
        // the pipeline invokes safeDisplayFn from a single consumer at a time -- TODO confirm.
        int imagesSoFar = 0;
        Action <ImageInfo> safeDisplayFn = info =>
        {
            if (info.SequenceNumber != imagesSoFar)
            {
                throw new InvalidOperationException("Images processed out of order. Saw " + info.SequenceNumber.ToString() + " , expected " + imagesSoFar);
            }
            displayFn(info);
            imagesSoFar += 1;
        };

        // Create a cancellation handle for inter-task signaling of exceptions. This cancellation
        // handle is also triggered by the incoming token that indicates user-requested
        // cancellation.
        using (CancellationTokenSource cts = CancellationTokenSource.CreateLinkedTokenSource(token))
        {
            IEnumerable <string> fileNames = SampleUtilities.GetImageFilenames(sourceDir, MaxNumberOfImages);
            switch (algorithmChoice)
            {
                case 0:
                    RunSequential(fileNames, sourceDir, safeDisplayFn, cts);
                    break;
                case 1:
                    RunPipelined(fileNames, sourceDir, QueueBoundedCapacity, safeDisplayFn, cts);
                    break;
                case 2:
                    RunLoadBalancedPipeline(fileNames, sourceDir, QueueBoundedCapacity, safeDisplayFn, cts, LoadBalancingDegreeOfConcurrency);
                    break;
                default:
                    throw new InvalidOperationException("Invalid algorithm choice.");
            }
        }
    }
    catch (AggregateException ae)
    {
        // Unwrap a lone inner exception for a cleaner error report.
        errorFn((ae.InnerExceptions.Count == 1) ? ae.InnerExceptions[0] : ae);
    }
    catch (Exception e)
    {
        errorFn(e);
    }
}
/// <summary>
/// Simulates loading NYSE market data from an I/O source.
/// </summary>
/// <returns>The loaded data, or null when cancellation was requested during the load.</returns>
StockDataCollection LoadNyseData()
{
    SampleUtilities.DoIoIntensiveOperation(2.5, cts.Token);

    // Return null rather than throwing when the run was cancelled mid-operation.
    return cts.Token.IsCancellationRequested
        ? null
        : new StockDataCollection(MakeNyseSecurityInfo());
}
/// <summary>
/// Fits a trend to each account's balance history and records the parallel-run
/// prediction and overdraft warning; accounts are processed concurrently.
/// </summary>
/// <param name="accounts">Repository whose accounts are updated in place.</param>
static void UpdatePredictionsParallel(AccountRepository accounts)
{
    // Each account is independent, so iterations can run on any thread.
    Parallel.ForEach(accounts.AllAccounts, account =>
    {
        var trend = SampleUtilities.Fit(account.Balance);
        var projected = trend.Predict(account.Balance.Length + NumberOfMonths);
        account.ParPrediction = projected;
        account.ParWarning = projected < account.Overdraft;
    });
}
/// <summary>
/// Fits a trend to each account's balance history and records the sequential-run
/// prediction and overdraft warning, one account at a time.
/// </summary>
/// <param name="accounts">Repository whose accounts are updated in place.</param>
static void UpdatePredictionsSequential(AccountRepository accounts)
{
    foreach (Account account in accounts.AllAccounts)
    {
        var trend = SampleUtilities.Fit(account.Balance);
        var projected = trend.Predict(account.Balance.Length + NumberOfMonths);
        account.SeqPrediction = projected;
        account.SeqWarning = projected < account.Overdraft;
    }
}
/// <summary>
/// Simulates loading Fed historical data from an I/O source.
/// </summary>
/// <returns>The loaded data, or null when cancellation was requested during the load.</returns>
StockDataCollection LoadFedHistoricalData()
{
    SampleUtilities.DoIoIntensiveOperation(3.0 * speedFactor, cts.Token);

    // Return null rather than throwing when the run was cancelled mid-operation.
    return cts.Token.IsCancellationRequested
        ? null
        : new StockDataCollection(MakeFedSecurityInfo());
}
/// <summary>
/// Places a grid control on the template, choosing the placeholder appropriate
/// for the template framework.
/// </summary>
/// <param name="pageTemplateId">Id of the template to add the grid to.</param>
/// <param name="framework">Hybrid or pure MVC framework; determines the placeholder name.</param>
/// <param name="gridVirtualPath">Virtual path of the grid layout.</param>
private void AddGridToPageTemplate(Guid pageTemplateId, PageTemplateFramework framework, string gridVirtualPath = ModuleUnloadTests.GridVirtualPath)
{
    string placeholder;
    if (framework == PageTemplateFramework.Hybrid)
    {
        placeholder = "Body";
    }
    else
    {
        placeholder = "Contentplaceholder1";
    }

    var gridControl = new GridControl();
    gridControl.Layout = gridVirtualPath;
    SampleUtilities.AddControlToTemplate(pageTemplateId, gridControl, placeholder, "9 + 3");
}
/// <summary>
/// Times the task-based "parallel while not empty" walk: visits every node,
/// doing CPU-intensive work per node and collecting results in a thread-safe bag.
/// </summary>
/// <param name="tree">Tree to traverse.</param>
static void Chapter6Example01ParallelWhileNotEmpty2(Tree <string> tree)
{
    // Repeat N times so the run is long enough to time meaningfully.
    for (int iteration = 0; iteration < N; iteration++)
    {
        var visited = new ConcurrentBag<string>();
        Walk5(tree, data =>
        {
            SampleUtilities.DoCpuIntensiveOperation(Time);
            visited.Add(data);
        });
    }
}
/// <summary>
/// Simulates the CPU-intensive comparison of market models.
/// </summary>
/// <param name="models">Models to compare.</param>
/// <returns>The comparison result, or null when cancellation was requested.</returns>
MarketRecommendation CompareModels(IEnumerable <MarketModel> models)
{
    SampleUtilities.DoCpuIntensiveOperation(2.0 * speedFactor, cts.Token);

    // Return null rather than throwing when the run was cancelled mid-operation.
    return cts.Token.IsCancellationRequested
        ? null
        : ModelComparer.Run(models.ToArray());
}
/// <summary>
/// Simulates the CPU-intensive modeling step over the analyzed stock data.
/// </summary>
/// <param name="data">Analyzed data fed into the modeler.</param>
/// <returns>The computed model, or null when cancellation was requested.</returns>
MarketModel RunModel(StockAnalysisCollection data)
{
    SampleUtilities.DoCpuIntensiveOperation(2.0 * speedFactor, cts.Token);

    // Return null rather than throwing when the run was cancelled mid-operation.
    return cts.Token.IsCancellationRequested
        ? null
        : MarketModeler.Run(data);
}
/// <summary>
/// Normalize stock data (simulated as a CPU-intensive operation).
/// </summary>
/// <param name="marketData">Market data to normalize.</param>
/// <returns>A new collection wrapping the data, or null when cancellation was requested.</returns>
StockDataCollection NormalizeData(StockDataCollection marketData)
{
    SampleUtilities.DoCpuIntensiveOperation(2.0 * speedFactor, cts.Token);

    // Return null rather than throwing when the run was cancelled mid-operation.
    return cts.Token.IsCancellationRequested
        ? null
        : new StockDataCollection(marketData);
}
/// <summary>
/// Times the sequential tree walk: visits every node in order, doing
/// CPU-intensive work per node and collecting the visited values.
/// </summary>
/// <param name="tree">Tree to traverse.</param>
static void Chapter6Example1Sequential(Tree <string> tree)
{
    // Repeat N times so the run is long enough to time meaningfully.
    for (int iteration = 0; iteration < N; iteration++)
    {
        var visited = new List<string>();
        SequentialWalk(tree, data =>
        {
            SampleUtilities.DoCpuIntensiveOperation(Time);
            visited.Add(data);
        });
    }
    Console.WriteLine();
}
/// <summary>
/// Entry point for the pipeline samples: runs the sentence-writing example
/// sequentially and as a pipeline, then checks that both produced the same output.
/// </summary>
static void Main()
{
    Console.WriteLine("Basic Pipeline Samples\n");
#if (DEBUG)
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    // The same seed feeds both runs so their outputs are comparable.
    int seed = Environment.TickCount;

    SampleUtilities.TimedAction(() => Chapter7Example01Sequential(seed), "Write sentences, sequential");
    SampleUtilities.TimedAction(() => Chapter7Example01Pipeline(seed), "Write sentences, pipeline");
    CheckResults();

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadKey();
}
/// <summary>
/// Demonstrates the decorator pattern: times an image rotation through the plain
/// serial editor and again through the parallel decorator wrapping a serial one.
/// </summary>
private static void DecoratorExample()
{
    const int maxImages = 1000;
    Console.WriteLine("Loading images...");
    IList<Bitmap> images = LoadImages(maxImages);

    // Baseline: undecorated serial implementation.
    IImageEditor serialEditor = new SerialEditor();
    SampleUtilities.TimedAction(
        () => { serialEditor.Rotate(RotateFlipType.RotateNoneFlipX, images); },
        "Rotate, sequential");

    // Decorated: the parallel editor delegates to a serial editor per item.
    IImageEditor parallelEditor = new ParallelEditor(new SerialEditor());
    SampleUtilities.TimedAction(
        () => { parallelEditor.Rotate(RotateFlipType.RotateNoneFlipX, images); },
        "Rotate, parallel");
}
/// <summary>
/// Runs <paramref name="count"/> simulation trials with PLINQ and merges the
/// results into a single histogram, using the Aggregate overload that gives
/// each partition its own task-local accumulator (histogram + Random).
/// </summary>
/// <param name="count">Number of trials to run.</param>
/// <param name="mean">Mean passed through to each simulation trial.</param>
/// <param name="stdDev">Standard deviation passed through to each simulation trial.</param>
/// <returns>Histogram of simulation results, bucketed by BucketSize.</returns>
static int[] DoParallelAggregationPlinq(int count, double mean, double stdDev)
{
    // Signature of the Aggregate overload used below, for reference:
    //
    // Aggregate<TSource, TAccumulate, TResult>(
    //     this ParallelQuery<TSource> source,
    //     Func<TAccumulate> seedFactory,
    //     Func<TAccumulate, TSource, TAccumulate> updateAccumulatorFunc,
    //     Func<TAccumulate, TAccumulate, TAccumulate> combineAccumulatorsFunc,
    //     Func<TAccumulate, TResult> resultSelector);
    return (ParallelEnumerable.Range(0, count).Aggregate(
        // 1- create an empty local accumulator object that includes all
        //    task-local state: a private histogram plus a private Random
        //    (each partition gets its own Random instance).
        () => new Tuple <int[], Random>(MakeEmptyHistogram(), new Random(SampleUtilities.MakeRandomSeed())),

        // 2- run the simulation, adding result to local accumulator
        (localAccumulator, i) =>
        {
            // With each iteration get the next random value
            var sample = localAccumulator.Item2.NextDouble();
            if (sample > 0.0 && sample < 1.0)
            {
                // Perform a simulation trial for the sample value
                var simulationResult = DoSimulation(sample, mean, stdDev);

                // Put the result of simulation into the histogram of the local accumulator
                int histogramBucket = (int)Math.Floor(simulationResult / BucketSize);
                if (0 <= histogramBucket && histogramBucket < TableSize)
                {
                    localAccumulator.Item1[histogramBucket] += 1;
                }
            }
            return localAccumulator;
        },

        // 3- Combine local results pairwise. The Random instance is no longer
        //    needed at this stage, so the merged accumulator carries null in its place.
        (localAccumulator1, localAccumulator2) =>
        {
            return new Tuple <int[], Random>(
                CombineHistograms(localAccumulator1.Item1, localAccumulator2.Item1),
                null);
        },

        // 4- Extract answer from final combination
        finalAccumulator => finalAccumulator.Item1
        )); // Aggregate
}
/// <summary>
/// Loads up to <paramref name="maxImages"/> bitmaps from image files in the
/// current directory.
/// </summary>
/// <param name="maxImages">Maximum number of images to load.</param>
/// <returns>The loaded bitmaps; at most <paramref name="maxImages"/> of them.</returns>
private static IList <Bitmap> LoadImages(int maxImages)
{
    IEnumerable <string> paths = SampleUtilities.GetImageFilenames(Directory.GetCurrentDirectory(), maxImages);
    IList <Bitmap> images = new List <Bitmap>();
    foreach (var path in paths)
    {
        // Guard against the enumerator yielding more than requested. The original
        // post-increment check (i++ > maxImages) ran AFTER the add and allowed up
        // to maxImages + 2 images; checking the count first enforces the cap exactly.
        if (images.Count >= maxImages)
        {
            break;
        }
        // Path.Combine with a single argument was a no-op; the path is used as-is.
        images.Add(new Bitmap(path));
    }
    return images;
}
/// <summary>
/// Bootstrap hook: during the route-registration phase, maps the module's
/// embedded resources to a virtual path and registers the template importer module.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="args">Bootstrap event data; only the "RegisterRoutes" command is handled.</param>
protected void Bootstrapper_Initializing(object sender, Telerik.Sitefinity.Data.ExecutingEventArgs args)
{
    // Only react to the route-registration phase of bootstrap.
    if (args.CommandName != "RegisterRoutes")
    {
        return;
    }

    var virtualPathConfig = Config.Get <VirtualPathSettingsConfig>();

    // Serve the module's embedded resources under ~/SFRealEstate/*.
    var realEstatePathElement = new VirtualPathElement(virtualPathConfig.VirtualPaths)
    {
        VirtualPath = "~/SFRealEstate/*",
        ResolverName = "EmbeddedResourceResolver",
        ResourceLocation = "Telerik.StarterKit.Modules.RealEstate"
    };
    virtualPathConfig.VirtualPaths.Add(realEstatePathElement);

    SampleUtilities.RegisterModule <TemplateImporterModule>("Template Importer", "This module imports templates from template builder.");
}
/// <summary>
/// Drives the parallel tasks samples: times the task-based variants, then runs
/// the closure and Dispose pitfall demonstrations.
/// </summary>
static void MainTask()
{
    Console.WriteLine("Basic Parallel Tasks Samples\n");
#if DEBUG
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    // Timed comparisons of sequential vs. task-based variants.
    SampleUtilities.TimedAction(Chapter3Sample01Sequential, "2 steps, sequential");
    SampleUtilities.TimedAction(Chapter3Sample01ParallelTask, "2 steps (Task.Wait), parallel");
    SampleUtilities.TimedAction(Chapter3Sample01ParallelInvoke, "2 steps, parallel invoke");
    SampleUtilities.TimedAction(Chapter3Sample03, "Speculative Execution");
    SampleUtilities.TimedAction(Chapter3Sample04_1, "Task.WaitAny");

    // Common pitfalls, each shown incorrect then correct.
    ExampleOfIncorrectClosure();
    ExampleOfCorrectClosure();
    ExampleOfIncorrectDispose();
    ExampleOfCorrectDispose();

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadLine();
}
/// <summary>
/// Merges several market data collections into one (simulated as a
/// CPU-intensive operation).
/// </summary>
/// <param name="allMarketData">Collections whose securities are concatenated.</param>
/// <returns>The merged collection, or null when cancellation was requested.</returns>
StockDataCollection MergeMarketData(IEnumerable <StockDataCollection> allMarketData)
{
    SampleUtilities.DoCpuIntensiveOperation(2.0 * speedFactor, cts.Token);

    // Skip the merge work entirely if cancellation already arrived.
    var securities = new List <StockData>();
    if (!cts.Token.IsCancellationRequested)
    {
        foreach (var collection in allMarketData)
        {
            securities.AddRange(collection);
        }
    }

    // Re-check: cancellation may have been requested while merging.
    return cts.Token.IsCancellationRequested
        ? null
        : new StockDataCollection(securities);
}
/// <summary>
/// Entry point for the aggregation sample: runs the same Monte Carlo histogram
/// aggregation sequentially, in parallel, and with PLINQ, timing and printing each.
/// </summary>
static void Main()
{
    Console.WriteLine("Aggregation Sample\n");
#if DEBUG
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    const int trialCount = 5000000;
    const double mean = 102.5;
    const double stdDev = 15;

    var stopwatch = new Stopwatch();

    Console.WriteLine("Performing Sequential Aggregation...");
    stopwatch.Start();
    int[] sequentialHistogram = DoSequentialAggregation(trialCount, mean, stdDev);
    stopwatch.Stop();
    Console.WriteLine(SampleUtilities.FormattedTime(stopwatch.Elapsed));
    PrintHistogram(sequentialHistogram);

    Console.WriteLine("Performing Parallel Aggregation...");
    stopwatch.Restart();
    int[] parallelHistogram = DoParallelAggregation(trialCount, mean, stdDev);
    stopwatch.Stop();
    Console.WriteLine(SampleUtilities.FormattedTime(stopwatch.Elapsed));
    PrintHistogram(parallelHistogram);

    Console.WriteLine("Performing PLINQ Aggregation...");
    stopwatch.Restart();
    int[] plinqHistogram = DoParallelAggregationPlinq(trialCount, mean, stdDev);
    stopwatch.Stop();
    Console.WriteLine(SampleUtilities.FormattedTime(stopwatch.Elapsed));
    PrintHistogram(plinqHistogram);

    Console.WriteLine("\nRun complete... press enter to finish.");
    Console.ReadLine();
}
/// <summary>
/// Entry point for the Image Blender sample: blends two images into layers,
/// timing the sequential, task-based, and Parallel.Invoke variants, then saves
/// the blended result and displays it until the user closes the window.
/// Optional args: [0] source directory, [1] first file name, [2] second file
/// name, [3] destination directory.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("Image Blender Sample\n");
#if DEBUG
    Console.WriteLine("For most accurate timing results, use Release build.\n");
#endif
    // Defaults; each may be overridden positionally from the command line.
    string sourceDir = Directory.GetCurrentDirectory();
    string file1 = "flowers.jpg"; // don't rotate
    string file2 = "dog.jpg";     // don't set to gray
    string destDir = Directory.GetCurrentDirectory();

    if (args.Length > 0)
    {
        sourceDir = args[0];
    }
    if (args.Length > 1)
    {
        file1 = args[1];
    }
    if (args.Length > 2)
    {
        file2 = args[2];
    }
    if (args.Length > 3)
    {
        destDir = args[3];
    }

    string path1 = Path.Combine(sourceDir, file1);
    string path2 = Path.Combine(sourceDir, file2);

    // Fail fast with a clear message if any input is missing.
    SampleUtilities.CheckDirectoryExists(sourceDir);
    SampleUtilities.CheckFileExists(path1);
    SampleUtilities.CheckFileExists(path2);
    SampleUtilities.CheckDirectoryExists(destDir);

    // Load source images
    var source1 = new Bitmap(path1);
    var source2 = new Bitmap(path2);

    // Prepare for result image
    var layer1 = new Bitmap(source1.Width, source1.Height); // new layer apparently includes alpha layer...
    var layer2 = new Bitmap(source2.Width, source2.Height); // ... even when source does not.

    using (var result = new Bitmap(source1.Width, source1.Height))
    {
        var blender = Graphics.FromImage(result);
        blender.CompositingMode = CompositingMode.SourceOver; // NOT SourceCopy mode

        // Sequential
        SampleUtilities.TimedRun(() => SeqentialImageProcessing(source1, source2, layer1, layer2, blender), " Sequential");

        // restore layers to initial condition; layer2 must be unrotated
        layer1 = new Bitmap(source1.Width, source1.Height);
        layer2 = new Bitmap(source2.Width, source2.Height);

        // Parallel tasks
        SampleUtilities.TimedRun(() => ParallelTaskImageProcessing(source1, source2, layer1, layer2, blender), " Parallel tasks");

        // restore layers to initial condition; layer2 must be unrotated
        layer1 = new Bitmap(source1.Width, source1.Height);
        layer2 = new Bitmap(source2.Width, source2.Height);

        // Parallel invoke
        SampleUtilities.TimedRun(() => ParallelInvokeImageProcessing(source1, source2, layer1, layer2, blender), " Parallel invoke");

        // Save blended image in file
        result.Save(Path.Combine(destDir, "blended.jpg"));

        // Show blended image on screen, pause until user closes window
        Console.WriteLine("Close image window to exit program.");
        using (var form = new Form()) // ensure disposal, prevent warning CA2000
        {
            using (var pb = new PictureBox())
            {
                pb.SizeMode = PictureBoxSizeMode.AutoSize; // fit to image - but form is initially smaller
                pb.Image = result;
                form.Controls.Add(pb);
                form.ShowDialog();
            }
        }
    } // using result
}