/// <summary>
/// Populates the partitioning-methods list view with the partitioning strategies
/// applicable to the currently selected execution engine (PLINQ vs. Parallel.ForEach),
/// and selects the first entry.
/// </summary>
private void InitializePartitioningMethods()
{
    lvPartitioningMethods.Items.Clear();

    bool usingPLINQ = rbPLINQ.Checked;
    var partitioningMethods = new List<Tuple<string, Func<int[], Partitioner<int>>>>();

    // Static partitioning using the Partitioner.Create overload requires static partitioner
    // support, which Parallel.ForEach does not provide — PLINQ only.
    if (usingPLINQ)
    {
        partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
            "Static", e => Partitioner.Create(e, false)));
    }

    // Partitioning approaches that work with both PLINQ and Parallel.ForEach.
    partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
        "Load Balance", e => Partitioner.Create(e, true)));
    partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
        "Dynamic(1)", e => ChunkPartitioner.Create(e, 1)));
    partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
        "Dynamic(16)", e => ChunkPartitioner.Create(e, 16)));
    partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
        "Guided", e => ChunkPartitioner.Create(e, prev =>
        {
            if (prev <= 0)
            {
                // FIX: the original returned e.Length / (ProcessorCount * 3) whenever
                // Length > 1, which is 0 for short arrays on many-core machines
                // (e.g. Length == 2 with 8 cores → 2 / 24 == 0), producing a zero
                // chunk size. Clamp to at least 1 so the partitioner always advances.
                return Math.Max(1, e.Length / (Environment.ProcessorCount * 3));
            }
            var next = prev / 2;
            return next <= 0 ? prev : next;
        })));
    partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
        "Grow Exponential", e => ChunkPartitioner.Create(e, prev => prev <= 0 ? 1 : prev * 2)));
    // NOTE(review): Next(e.Length) ranges over [0, Length-1] and can yield a chunk
    // size of 0 — confirm ChunkPartitioner tolerates that before relying on "Random".
    partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
        "Random", e => ChunkPartitioner.Create(e, prev => _localRandom.Value.Next(e.Length))));

    // Special-case some PLINQ-only partitioning.
    if (usingPLINQ)
    {
        // The actual enabling of these partitioning schemes is done later, as they can't
        // be encoded in a partitioner but rather are based on what operators are used
        // in the PLINQ query.
        partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
            PartitioningStripe, e => Partitioner.Create(e)));
        partitioningMethods.Add(Tuple.Create<string, Func<int[], Partitioner<int>>>(
            PartitioningHash, e => Partitioner.Create(e)));
    }

    // Dump the partitioners into the list view; each item carries its factory in Tag.
    foreach (var method in partitioningMethods)
    {
        lvPartitioningMethods.Items.Add(new ListViewItem(method.Item1) { Tag = method });
    }
    // Always non-empty: "Load Balance" is added unconditionally above.
    lvPartitioningMethods.Items[0].Selected = true;
}
/// <summary>
/// Streams DataTable buffers produced by <c>ReadData</c> into SQL Server, writing
/// them in parallel. Any failure is logged and terminates the process with exit code 1.
/// </summary>
/// <param name="OGRDataSource">OGR data source the rows are read from.</param>
/// <param name="OSMDataSet">Schema/data set describing the OSM tables being loaded.</param>
/// <param name="MSSQLConnectionString">Connection string of the target SQL Server database.</param>
/// <param name="SQLThreads">Maximum number of concurrent writer threads.</param>
/// <param name="SQLBatchSize">Number of rows per buffer handed to <c>ReadData</c>.</param>
static void DoWork(DataSource OGRDataSource, DataSet OSMDataSet, string MSSQLConnectionString, int SQLThreads, int SQLBatchSize)
{
    // Chunk size of 1: each worker takes exactly one DataTable buffer at a time,
    // so buffers are written as soon as the reader produces them.
    ChunkPartitioner<DataTable> bufferEnumerator =
        new ChunkPartitioner<DataTable>(ReadData(OGRDataSource, OSMDataSet, SQLBatchSize), 1);

    ParallelOptions pOptions = new ParallelOptions();
    pOptions.MaxDegreeOfParallelism = SQLThreads;

    try
    {
        Parallel.ForEach(bufferEnumerator, pOptions, buffer =>
        {
            WriteBuffer(buffer, MSSQLConnectionString);
        });
    }
    catch (AggregateException ae)
    {
        // FIX: Parallel.ForEach wraps worker exceptions in an AggregateException whose
        // Message is just "One or more errors occurred." — the original logged only
        // that, hiding the real cause. Log every flattened inner exception instead.
        foreach (Exception inner in ae.Flatten().InnerExceptions)
        {
            log(TraceLevel.Error, inner.Message);
        }
        Environment.Exit(1);
    }
    catch (Exception e)
    {
        log(TraceLevel.Error, e.Message);
        Environment.Exit(1);
    }
}
/// <summary>
/// Reads batched DataTable buffers from the OGR source and writes them to SQL Server
/// in parallel; on any error, logs the message and exits the process with code 1.
/// </summary>
/// <param name="OGRDataSource">OGR data source to read from.</param>
/// <param name="OSMDataSet">Data set describing the OSM tables being loaded.</param>
/// <param name="MSSQLConnectionString">Target SQL Server connection string.</param>
/// <param name="SQLThreads">Maximum degree of parallelism for the writers.</param>
/// <param name="SQLBatchSize">Rows per buffer requested from <c>ReadData</c>.</param>
// NOTE(review): this appears to duplicate the DoWork method above — confirm whether
// one of the two copies can be removed.
static void DoWork(DataSource OGRDataSource, DataSet OSMDataSet, string MSSQLConnectionString, int SQLThreads, int SQLBatchSize)
{
    // Hand workers one buffer at a time so each batch is written as soon as it is read.
    var partitioner = new ChunkPartitioner<DataTable>(ReadData(OGRDataSource, OSMDataSet, SQLBatchSize), 1);
    var options = new ParallelOptions { MaxDegreeOfParallelism = SQLThreads };

    try
    {
        Parallel.ForEach(partitioner, options, buffer => WriteBuffer(buffer, MSSQLConnectionString));
    }
    catch (Exception e)
    {
        log(TraceLevel.Error, e.Message);
        Environment.Exit(1);
    }
}
/// <summary>
/// Verifies that every stored solution file — and each of its three rotations —
/// can still be solved; fails listing every unsolvable variant.
/// </summary>
public void Test_Solutions_Solvable()
{
    SudokuOptions.Current.ShowAllSolutions = false;
    SudokuOptions.Current.IncludeBoxes = true;

    FileInfo[] files = new DirectoryInfo(Directories.Solutions).GetFiles(FileExtensions.XmlZipMask);
    ProgressIndicator pi = new ProgressIndicator(System.Reflection.MethodBase.GetCurrentMethod().Name);

    // Load every solution file in parallel, remembering which file it came from.
    var solutions1 = (from file in files.AsParallel()
                      select new
                      {
                          intermediate_solution = SudokuIntermediateSolution.LoadFromFile(file.FullName),
                          fileName = file.FullName
                      }).ToArray();

    // Expand each solution into its four rotations (0..3 quarter-turns).
    // FIX: build the rotations incrementally — the original recomputed
    // Rotate() from scratch for each variant (up to six calls per solution).
    var solutions2 = (from sol in solutions1.AsParallel()
                      let r1 = sol.intermediate_solution.Rotate()
                      let r2 = r1.Rotate()
                      let r3 = r2.Rotate()
                      select new[]
                      {
                          new { intermediate_solution = sol.intermediate_solution, fileName = sol.fileName, rotate = 0 },
                          new { intermediate_solution = r1, fileName = sol.fileName, rotate = 1 },
                          new { intermediate_solution = r2, fileName = sol.fileName, rotate = 2 },
                          new { intermediate_solution = r3, fileName = sol.fileName, rotate = 3 }
                      }).SelectMany().ToArray();

    // Group the variants by solution type so related puzzles are chunked together.
    var solutions3 = (from sol in solutions2
                      group sol by sol.intermediate_solution.Solution.Type into gr
                      select gr.ToArray()).ToArray();

    ConcurrentBag<string> unsolvable = new ConcurrentBag<string>();
    // FIX: the groups are materialized arrays — use Length instead of the O(n) Count() extension.
    int count = solutions3.Sum(gr => gr.Length);
    ConcurrentCounter counter = new ConcurrentCounter();

    // Solve in parallel, 10 variants per chunk, reporting integer-percent progress.
    Parallel.ForEach(ChunkPartitioner.Create(solutions3.SelectMany(), 10), (solution) =>
    {
        if (!solution.intermediate_solution.Test(true))
        {
            unsolvable.Add("unsolvable; rotate_" + solution.rotate + "; " + solution.fileName);
        }
        counter.Increment();
        pi.AddLine((counter.Value * 100 / count).ToString());
    });

    if (unsolvable.Count > 0)
    {
        foreach (var file in unsolvable)
        {
            TestContext.WriteLine(file);
        }
        // FIX: include a summary so the failure is self-describing in the test report.
        Assert.Fail(unsolvable.Count + " solution variant(s) could not be solved; see test output for the list.");
    }
}