private int OnExecute()
{
    InputFile = Path.GetFullPath(InputFile);

    if (OutputFile != null)
    {
        OutputFile = Path.GetFullPath(OutputFile);
        OutputFile = FixPathSlashes(OutputFile);
    }

    if (FileFilter != null)
    {
        FileFilter = FixPathSlashes(FileFilter);
    }

    if (ExtFilter != null)
    {
        ExtFilterList = ExtFilter.Split(',');
    }

    var paths = new List<string>();

    if (Directory.Exists(InputFile))
    {
        if (OutputFile != null && File.Exists(OutputFile))
        {
            Console.Error.WriteLine("Output path is an existing file, but input is a folder.");
            return 1;
        }

        IsInputFolder = true;

        var files = Directory
            .EnumerateFiles(InputFile, "*.*", RecursiveSearch ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)
            .Where(s => s.EndsWith("_c") || s.EndsWith(".vcs"));

        if (!files.Any())
        {
            Console.Error.WriteLine(
                "Unable to find any \"_c\" compiled files in \"{0}\" folder.{1}",
                InputFile,
                RecursiveSearch ? " Did you mean to include --recursive parameter?" : string.Empty);

            return 1;
        }

        paths.AddRange(files);
    }
    else if (File.Exists(InputFile))
    {
        if (RecursiveSearch)
        {
            Console.Error.WriteLine("File passed in with --recursive option. Either pass in a folder or remove --recursive.");
            return 1;
        }

        paths.Add(InputFile);
    }
    else
    {
        Console.Error.WriteLine("Input \"{0}\" is not a file or a folder.", InputFile);
        return 1;
    }

    CurrentFile = 0;
    TotalFiles = paths.Count;

    if (MaxParallelismThreads > 1)
    {
        Console.WriteLine("Will use {0} threads concurrently.", MaxParallelismThreads);

        // NoBuffering: hand each worker one file path at a time, so a thread
        // that hits a large file does not hold a buffered batch hostage.
        var partitioner = Partitioner.Create(paths, EnumerablePartitionerOptions.NoBuffering);

        Parallel.ForEach(partitioner, new ParallelOptions { MaxDegreeOfParallelism = MaxParallelismThreads }, (path, state) =>
        {
            ProcessFile(path);
        });
    }
    else
    {
        foreach (var path in paths)
        {
            ProcessFile(path);
        }
    }

    if (CollectStats)
    {
        Console.WriteLine();
        Console.WriteLine("Processed resource stats:");

        foreach (var stat in stats.OrderByDescending(x => x.Value.Count).ThenBy(x => x.Key))
        {
            var info = stat.Value.Info != string.Empty ? string.Format(" ({0})", stat.Value.Info) : string.Empty;

            Console.WriteLine($"{stat.Value.Count,5} resources of version {stat.Value.Version} and type {stat.Value.Type}{info}");

            foreach (var file in stat.Value.FilePaths)
            {
                Console.WriteLine($"\t\t{file}");
            }
        }

        Console.WriteLine();
        Console.WriteLine("Unique special dependencies:");

        foreach (var stat in uniqueSpecialDependancies)
        {
            Console.WriteLine("{0} in {1}", stat.Key, stat.Value);
        }
    }

    return 0;
}
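// Why NoBuffering suits the loop above: compiled resource files vary a lot in
// size, so letting a worker buffer several paths up front can leave one thread
// stuck behind a big file while the others idle. A minimal, self-contained
// sketch of the same pattern follows; the item list and the sleep-based
// workload are illustrative assumptions, not part of the tool above.
using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class NoBufferingDemo
{
    static void Main()
    {
        // Work items with uneven cost, standing in for files of different sizes.
        var items = Enumerable.Range(1, 40).ToList();

        // NoBuffering hands each worker exactly one item at a time instead of
        // letting it grab a chunk up front.
        var partitioner = Partitioner.Create(items, EnumerablePartitionerOptions.NoBuffering);

        Parallel.ForEach(partitioner, new ParallelOptions { MaxDegreeOfParallelism = 4 }, item =>
        {
            Thread.Sleep(item % 7 == 0 ? 200 : 10); // simulate uneven per-item cost
            Console.WriteLine($"Processed item {item} on thread {Environment.CurrentManagedThreadId}");
        });
    }
}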
public static void Epsimp_calculate(int NI, int NJ, int NK)
{
    // Local copies for fast, read-only access inside the parallel sweeps.
    // (The original declared these only inside the first sweep's lambda,
    // leaving them out of scope for the remaining seven.)
    double CEPSILON1 = Program.CEPSILON1;
    double CEPSILON2 = Program.CEPSILON2;

    // Chunk size for the range partitioner: a lower-bounded number of steps per
    // processor, varied slightly with the time step (ITIME) and per sweep so
    // that partition boundaries do not fall on the same cells every iteration.
    int RangeSize(int n, int offset, int minSteps)
    {
        int range_parallel = n / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2 + offset;
        range_parallel = Math.Max(minSteps - (ITIME % 3) * 2, range_parallel); // minimum steps per processor
        return Math.Min(n, range_parallel);                                    // in case n < range_parallel
    }

    // Implicit solution of the dissipation-rate equation along the vertical
    // column (i, j): production terms, the tridiagonal recurrence upward and
    // back-substitution with under-relaxation downward. PIM and QIM are
    // per-thread scratch arrays supplied by the caller. This body is shared by
    // all eight sweeps below (the original duplicated it verbatim in each).
    void SolveColumn(int i, int j, double[] PIM, double[] QIM)
    {
        double help;

        for (int k = 1; k <= NK - 1; k++)
        {
            // Velocity gradients for the production terms
            double DUDX = (Program.U[i + 1][j][k] - Program.U[i - 1][j][k]) / (Program.ZAX[i] + Program.ZAX[i - 1]);
            double DVDX = (Program.V[i + 1][j][k] - Program.V[i - 1][j][k]) / (Program.ZAX[i] + Program.ZAX[i - 1]);
            double DWDX = (Program.W[i + 1][j][k] - Program.W[i - 1][j][k]) / (Program.ZAX[i] + Program.ZAX[i - 1]);
            double DUDY = (Program.U[i][j + 1][k] - Program.U[i][j - 1][k]) / (Program.ZAY[j] + Program.ZAY[j - 1]);
            double DVDY = (Program.V[i][j + 1][k] - Program.V[i][j - 1][k]) / (Program.ZAY[j] + Program.ZAY[j - 1]);
            double DWDY = (Program.W[i][j + 1][k] - Program.W[i][j - 1][k]) / (Program.ZAY[j] + Program.ZAY[j - 1]);
            double DUDZ, DVDZ, DWDZ, DTDZ;

            if (k > 1)
            {
                DUDZ = (Program.U[i][j][k + 1] - Program.U[i][j][k - 1]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k - 1]);
                DVDZ = (Program.V[i][j][k + 1] - Program.V[i][j][k - 1]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k - 1]);
                DWDZ = (Program.W[i][j][k + 1] - Program.W[i][j][k - 1]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k - 1]);
                DTDZ = (Program.T[i][j][k + 1] - Program.T[i][j][k - 1]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k - 1]) - 0.00065;
            }
            else
            {
                DUDZ = (Program.U[i][j][k + 1] - Program.U[i][j][k]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k]);
                DVDZ = (Program.V[i][j][k + 1] - Program.V[i][j][k]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k]);
                DWDZ = (Program.W[i][j][k + 1] - Program.W[i][j][k]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k]);
                DTDZ = (Program.T[i][j][k + 1] - Program.T[i][j][k]) / (Program.ZSP[i][j][k + 1] + Program.ZSP[i][j][k]) - 0.00065;
            }

            // Production terms for the dissipation rate: shear and buoyancy
            double PSTRESS = Program.VISV[i][j][k] *
                (0.5 * (DUDX * DUDX + DVDY * DVDY + DWDZ * DWDZ) +
                 (DUDY + DVDX) * (DUDY + DVDX) +
                 (DUDZ + DWDX) * (DUDZ + DWDX) +
                 (DVDZ + DWDY) * (DVDZ + DWDY)) * Program.RHO[i][j][k];
            double PBUOY = -Program.VISV[i][j][k] * DTDZ * Program.GERD / (Program.T[i][j][k] + Program.TBZ1) / Program.PRTE * Program.RHO[i][j][k];

            double DIM = Program.AWEST_PS[i][j][k] * Program.DISSN[i - 1][j][k] +
                         Program.ASOUTH_PS[i][j][k] * Program.DISSN[i][j - 1][k] +
                         Program.AEAST_PS[i][j][k] * Program.DISSN[i + 1][j][k] +
                         Program.ANORTH_PS[i][j][k] * Program.DISSN[i][j + 1][k] +
                         Program.AP0_PS[i][j][k] * Program.DISS[i][j][k] +
                         (Program.DISS[i][j][k] / Math.Max(Math.Abs(Program.TE[i][j][k]), 0.01) *
                          (CEPSILON1 * PSTRESS + 0.4 * PBUOY - CEPSILON2 * Program.DISS[i][j][k] * Program.RHO[i][j][k])) * Program.VOL[i][j][k];

            // Recurrence formula (Thomas algorithm, upward)
            if (k > 1)
            {
                help = 1 / (Program.A_PS[i][j][k] - Program.C_PS[i][j][k] * PIM[k - 1]);
                PIM[k] = Program.B_PS[i][j][k] * help;
                QIM[k] = (DIM + Program.C_PS[i][j][k] * QIM[k - 1]) * help;
            }
            else
            {
                help = 1 / Program.A_PS[i][j][k];
                PIM[k] = Program.B_PS[i][j][k] * help;
                QIM[k] = DIM * help;
            }
        }

        // Back-substitution: obtain the new dissipation values, under-relaxed
        for (int k = NK - 1; k >= 1; k--)
        {
            help = Program.DISSN[i][j][k];
            help += Program.RELAXT * (PIM[k] * Program.DISSN[i][j][k + 1] + QIM[k] - help);
            Program.DISSN[i][j][k] = help;
        }
    }

    // Computation of the new dissipation rate. The implicit sweeps alternate
    // their direction (all four i/j combinations partitioned along i, then all
    // four partitioned along j) to avoid a directional bias in the solution.

    // Sweep 1: partitioned along i; i ascending, j ascending
    Parallel.ForEach(Partitioner.Create(2, NI, RangeSize(NI, 0, 30)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int i = range.Item1; i < range.Item2; i++)
        {
            for (int j = 2; j <= NJ - 1; j++)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 2: partitioned along i; i descending, j descending
    Parallel.ForEach(Partitioner.Create(2, NI, RangeSize(NI, 0, 30)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int i1 = range.Item1; i1 < range.Item2; i1++)
        {
            int i = NI - i1 + 1;
            for (int j = NJ - 1; j >= 2; j--)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 3: partitioned along i; i descending, j ascending
    Parallel.ForEach(Partitioner.Create(2, NI, RangeSize(NI, 6, 36)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int i1 = range.Item1; i1 < range.Item2; i1++)
        {
            int i = NI - i1 + 1;
            for (int j = 2; j <= NJ - 1; j++)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 4: partitioned along i; i ascending, j descending
    Parallel.ForEach(Partitioner.Create(2, NI, RangeSize(NI, 3, 33)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int i = range.Item1; i < range.Item2; i++)
        {
            for (int j = NJ - 1; j >= 2; j--)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 5: partitioned along j; j ascending, i ascending
    Parallel.ForEach(Partitioner.Create(2, NJ, RangeSize(NJ, 0, 30)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int j = range.Item1; j < range.Item2; j++)
        {
            for (int i = 2; i <= NI - 1; i++)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 6: partitioned along j; j descending, i descending
    Parallel.ForEach(Partitioner.Create(2, NJ, RangeSize(NJ, 0, 30)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int j1 = range.Item1; j1 < range.Item2; j1++)
        {
            int j = NJ - j1 + 1;
            for (int i = NI - 1; i >= 2; i--)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 7: partitioned along j; j ascending, i descending
    Parallel.ForEach(Partitioner.Create(2, NJ, RangeSize(NJ, 6, 36)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int j = range.Item1; j < range.Item2; j++)
        {
            for (int i = NI - 1; i >= 2; i--)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });

    // Sweep 8: partitioned along j; j descending, i ascending
    Parallel.ForEach(Partitioner.Create(2, NJ, RangeSize(NJ, 3, 33)), range =>
    {
        double[] PIM = new double[NK + 1];
        double[] QIM = new double[NK + 1];
        for (int j1 = range.Item1; j1 < range.Item2; j1++)
        {
            int j = NJ - j1 + 1;
            for (int i = 2; i <= NI - 1; i++)
            {
                SolveColumn(i, j, PIM, QIM);
            }
        }
    });
}
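// The sweeps above tune the third argument of Partitioner.Create(from, to,
// rangeSize), trading scheduling overhead against load balance, and vary it
// slightly per sweep and time step. A minimal sketch of how that argument
// controls the number of chunks a range partitioner produces; the grid size
// and range sizes below are illustrative assumptions.
using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;

class RangeSizeDemo
{
    static void Main()
    {
        const int NI = 200;
        foreach (int rangeSize in new[] { 30, 33, 36 })
        {
            int chunks = 0;
            // Each Parallel.ForEach iteration receives one (Item1, Item2) range;
            // the final range is truncated at the exclusive upper bound.
            Parallel.ForEach(Partitioner.Create(2, NI, rangeSize), range =>
            {
                Interlocked.Increment(ref chunks);
            });
            Console.WriteLine($"rangeSize {rangeSize}: {chunks} chunks covering [2, {NI})");
        }
    }
}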
public static void RangePartitionerDynamicChunking(long from, long count, long rangeSize)
{
    long to = from + count;
    var partitioner = (rangeSize == -1) ? Partitioner.Create(from, to) : Partitioner.Create(from, to, rangeSize);

    // Check dynamic partitions
    var partitions = partitioner.GetDynamicPartitions();
    var partition1 = partitions.GetEnumerator();
    var partition2 = partitions.GetEnumerator();

    // Initialize the from / to values from the first element
    if (!partition1.MoveNext())
    {
        return;
    }

    Assert.Equal(from, partition1.Current.Item1);
    if (rangeSize == -1)
    {
        rangeSize = partition1.Current.Item2 - partition1.Current.Item1;
    }

    long nextExpectedFrom = partition1.Current.Item2;
    long nextExpectedTo = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);

    // Ensure that each partition gets one range only.
    // We check this by alternating which partition asks for elements and making
    // sure that we get the ranges in a sequence. If chunking were to happen,
    // then we wouldn't see a sequence.
    long actualCount = partition1.Current.Item2 - partition1.Current.Item1;
    while (true)
    {
        if (!partition1.MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partition1.Current.Item1);
        Assert.Equal(nextExpectedTo, partition1.Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partition1.Current.Item2 - partition1.Current.Item1;

        if (!partition2.MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partition2.Current.Item1);
        Assert.Equal(nextExpectedTo, partition2.Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partition2.Current.Item2 - partition2.Current.Item1;

        if (!partition2.MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partition2.Current.Item1);
        Assert.Equal(nextExpectedTo, partition2.Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partition2.Current.Item2 - partition2.Current.Item1;

        if (!partition1.MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partition1.Current.Item1);
        Assert.Equal(nextExpectedTo, partition1.Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partition1.Current.Item2 - partition1.Current.Item1;
    }

    // Verifying that all items are there
    Assert.True(count == actualCount, "Must be equal.");
}
public static void RangePartitionerChunking(int from, int count, int rangeSize)
{
    int to = from + count;
    var partitioner = (rangeSize == -1) ? Partitioner.Create(from, to) : Partitioner.Create(from, to, rangeSize);

    // Check static partitions
    var partitions = partitioner.GetPartitions(2);

    // Initialize the from / to values from the first element
    if (!partitions[0].MoveNext())
    {
        return;
    }

    Assert.Equal(from, partitions[0].Current.Item1);
    if (rangeSize == -1)
    {
        rangeSize = partitions[0].Current.Item2 - partitions[0].Current.Item1;
    }

    int nextExpectedFrom = partitions[0].Current.Item2;
    int nextExpectedTo = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);

    // Ensure that each partition gets one range only.
    // We check this by alternating which partition asks for elements and making
    // sure that we get the ranges in a sequence. If chunking were to happen,
    // then we wouldn't see a sequence.
    int actualCount = partitions[0].Current.Item2 - partitions[0].Current.Item1;
    while (true)
    {
        if (!partitions[0].MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partitions[0].Current.Item1);
        Assert.Equal(nextExpectedTo, partitions[0].Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partitions[0].Current.Item2 - partitions[0].Current.Item1;

        if (!partitions[1].MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partitions[1].Current.Item1);
        Assert.Equal(nextExpectedTo, partitions[1].Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partitions[1].Current.Item2 - partitions[1].Current.Item1;

        if (!partitions[1].MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partitions[1].Current.Item1);
        Assert.Equal(nextExpectedTo, partitions[1].Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partitions[1].Current.Item2 - partitions[1].Current.Item1;

        if (!partitions[0].MoveNext())
        {
            break;
        }

        Assert.Equal(nextExpectedFrom, partitions[0].Current.Item1);
        Assert.Equal(nextExpectedTo, partitions[0].Current.Item2);
        nextExpectedFrom = (nextExpectedFrom + rangeSize) > to ? to : (nextExpectedFrom + rangeSize);
        nextExpectedTo = (nextExpectedTo + rangeSize) > to ? to : (nextExpectedTo + rangeSize);
        actualCount += partitions[0].Current.Item2 - partitions[0].Current.Item1;
    }

    // Verifying that all items are there
    Assert.Equal(count, actualCount);
}
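// Both tests assert the same contract from two APIs (dynamic and static
// partitions): a range partitioner hands out whole (fromInclusive, toExclusive)
// ranges one request at a time, with no extra per-partition buffering. A
// minimal sketch that makes the yielded tuples visible; the bounds below are
// illustrative.
using System;
using System.Collections.Concurrent;

class RangeTupleDemo
{
    static void Main()
    {
        // Enumerated single-threaded, the ranges come out in order and the
        // last one is clipped to the exclusive upper bound.
        foreach (var range in Partitioner.Create(0, 100, 30).GetDynamicPartitions())
        {
            Console.WriteLine($"[{range.Item1}, {range.Item2})");
        }
        // Prints: [0, 30) [30, 60) [60, 90) [90, 100)
    }
}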
/// <summary>
/// Performs a drawing operation from this Layer to a target layer.
/// </summary>
/// <param name="target">The target pixel data to draw onto.</param>
/// <param name="blend">The blend mode to apply.</param>
/// <param name="destX">Destination x coordinate in the target layer.</param>
/// <param name="destY">Destination y coordinate in the target layer.</param>
/// <param name="width">Width of the area to draw. Pass -1 to use the full source width.</param>
/// <param name="height">Height of the area to draw. Pass -1 to use the full source height.</param>
/// <param name="srcX">Source x coordinate in this layer.</param>
/// <param name="srcY">Source y coordinate in this layer.</param>
/// <param name="colorTint">A color by which each source pixel is multiplied before blending.</param>
public void DrawOnto(PixelData target, BlendMode blend, int destX, int destY, int width, int height, int srcX, int srcY, ColorRgba colorTint)
{
    // Without a tint, defer to the faster untinted overload.
    if (colorTint == ColorRgba.White)
    {
        this.DrawOnto(target, blend, destX, destY, width, height, srcX, srcY);
        return;
    }

    if (width == -1) width = this.width;
    if (height == -1) height = this.height;

    // Clamp the drawing area to the bounds of both layers.
    int beginX = MathF.Max(0, -destX, -srcX);
    int beginY = MathF.Max(0, -destY, -srcY);
    int endX = MathF.Min(width, this.width, target.width - destX, this.width - srcX);
    int endY = MathF.Min(height, this.height, target.height - destY, this.height - srcY);
    if (endX - beginX < 1) return;
    if (endY - beginY < 1) return;

    ColorRgba clrSource;
    ColorRgba clrTarget;
#if !DISABLE_ASYNC
    Parallel.ForEach(Partitioner.Create(beginX, endX), range =>
    {
        for (int i = range.Item1; i < range.Item2; i++)
        {
#else
    for (int i = beginX; i < endX; i++)
    {
#endif
            for (int j = beginY; j < endY; j++)
            {
                int sourceN = srcX + i + this.width * (srcY + j);
                int targetN = destX + i + target.width * (destY + j);

                clrSource = this.data[sourceN] * colorTint;

                if (blend == BlendMode.Solid)
                {
                    target.data[targetN] = clrSource;
                }
                else if (blend == BlendMode.Mask)
                {
                    if (clrSource.A > 0) target.data[targetN] = this.data[sourceN];
                }
                else if (blend == BlendMode.Add)
                {
                    clrTarget = target.data[targetN];
                    float alphaTemp = (float)clrSource.A / 255.0f;
                    target.data[targetN].R = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.R + clrSource.R * alphaTemp)));
                    target.data[targetN].G = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.G + clrSource.G * alphaTemp)));
                    target.data[targetN].B = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.B + clrSource.B * alphaTemp)));
                    target.data[targetN].A = (byte)Math.Min(255, Math.Max(0, (int)clrTarget.A + (int)clrSource.A));
                }
                else if (blend == BlendMode.Alpha)
                {
                    clrTarget = target.data[targetN];
                    float alphaTemp = (float)clrSource.A / 255.0f;
                    target.data[targetN].R = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.R * (1.0f - alphaTemp) + clrSource.R * alphaTemp)));
                    target.data[targetN].G = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.G * (1.0f - alphaTemp) + clrSource.G * alphaTemp)));
                    target.data[targetN].B = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.B * (1.0f - alphaTemp) + clrSource.B * alphaTemp)));
                    target.data[targetN].A = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.A * (1.0f - alphaTemp) + clrSource.A)));
                }
                else if (blend == BlendMode.AlphaPre)
                {
                    clrTarget = target.data[targetN];
                    float alphaTemp = (float)clrSource.A / 255.0f;
                    target.data[targetN].R = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.R * (1.0f - alphaTemp) + clrSource.R)));
                    target.data[targetN].G = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.G * (1.0f - alphaTemp) + clrSource.G)));
                    target.data[targetN].B = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.B * (1.0f - alphaTemp) + clrSource.B)));
                    target.data[targetN].A = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrTarget.A * (1.0f - alphaTemp) + clrSource.A)));
                }
                else if (blend == BlendMode.Multiply)
                {
                    clrTarget = target.data[targetN];
                    float clrTempR = (float)clrTarget.R / 255.0f;
                    float clrTempG = (float)clrTarget.G / 255.0f;
                    float clrTempB = (float)clrTarget.B / 255.0f;
                    target.data[targetN].R = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.R * clrTempR)));
                    target.data[targetN].G = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.G * clrTempG)));
                    target.data[targetN].B = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.B * clrTempB)));
                    target.data[targetN].A = (byte)Math.Min(255, Math.Max(0, (int)clrTarget.A + (int)clrSource.A));
                }
                else if (blend == BlendMode.Light)
                {
                    clrTarget = target.data[targetN];
                    float clrTempR = (float)clrTarget.R / 255.0f;
                    float clrTempG = (float)clrTarget.G / 255.0f;
                    float clrTempB = (float)clrTarget.B / 255.0f;
                    target.data[targetN].R = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.R * clrTempR + clrTarget.R)));
                    target.data[targetN].G = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.G * clrTempG + clrTarget.G)));
                    target.data[targetN].B = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.B * clrTempB + clrTarget.B)));
                    target.data[targetN].A = (byte)Math.Min(255, Math.Max(0, (int)clrTarget.A + (int)clrSource.A));
                }
                else if (blend == BlendMode.Invert)
                {
                    clrTarget = target.data[targetN];
                    float clrTempR = (float)clrTarget.R / 255.0f;
                    float clrTempG = (float)clrTarget.G / 255.0f;
                    float clrTempB = (float)clrTarget.B / 255.0f;
                    float clrTempR2 = (float)clrSource.R / 255.0f;
                    float clrTempG2 = (float)clrSource.G / 255.0f;
                    float clrTempB2 = (float)clrSource.B / 255.0f;
                    target.data[targetN].R = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.R * (1.0f - clrTempR) + clrTarget.R * (1.0f - clrTempR2))));
                    target.data[targetN].G = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.G * (1.0f - clrTempG) + clrTarget.G * (1.0f - clrTempG2))));
                    target.data[targetN].B = (byte)Math.Min(255, Math.Max(0, (int)Math.Round(clrSource.B * (1.0f - clrTempB) + clrTarget.B * (1.0f - clrTempB2))));
                    target.data[targetN].A = (byte)Math.Min(255, Math.Max(0, (int)(clrTarget.A + clrSource.A)));
                }
            }
        }
#if !DISABLE_ASYNC
    });
#endif
}
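// DrawOnto partitions the x axis and runs a tight serial loop per range, so
// the per-pixel work pays no delegate-invocation cost and no two threads ever
// write the same element. A minimal sketch of that shape on a plain buffer;
// the buffers, dimensions, and grayscale weights are illustrative assumptions.
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class ColumnPartitionDemo
{
    const int Width = 1920, Height = 1080;
    static readonly byte[] R = new byte[Width * Height];
    static readonly byte[] G = new byte[Width * Height];
    static readonly byte[] B = new byte[Width * Height];
    static readonly byte[] Gray = new byte[Width * Height];

    static void Main()
    {
        // Partition the x axis into ranges, then loop serially within each.
        // Columns are disjoint, so the output buffer needs no synchronization.
        Parallel.ForEach(Partitioner.Create(0, Width), range =>
        {
            for (int x = range.Item1; x < range.Item2; x++)
            {
                for (int y = 0; y < Height; y++)
                {
                    int n = x + Width * y;
                    Gray[n] = (byte)((R[n] * 299 + G[n] * 587 + B[n] * 114) / 1000);
                }
            }
        });

        Console.WriteLine("Converted {0} pixels.", Width * Height);
    }
}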
/// <summary>
/// Generates the user types.
/// </summary>
private void GenerateUserTypes()
{
    // Verify that included files exist
    if (!string.IsNullOrEmpty(xmlConfig.GeneratedAssemblyName))
    {
        foreach (var file in includedFiles)
        {
            if (!File.Exists(file.Path))
            {
                throw new FileNotFoundException("Included file not found", file.Path);
            }
        }
    }

    // Loading modules
    Module[] modulesArray = new Module[xmlModules.Length];

    logger.Write("Loading modules...");
    Parallel.For(0, xmlModules.Length, (i) =>
    {
        Module module = moduleProvider.Open(xmlModules[i]);

        modulesArray[i] = module;
    });

    Dictionary<Module, XmlModule> modules = new Dictionary<Module, XmlModule>();

    for (int i = 0; i < modulesArray.Length; i++)
    {
        modules.Add(modulesArray[i], xmlModules[i]);
    }
    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Enumerating symbols
    Symbol[][] globalTypesPerModule = new Symbol[xmlModules.Length][];

    logger.Write("Enumerating symbols...");
    Parallel.For(0, xmlModules.Length, (i) =>
    {
        XmlModule xmlModule = xmlModules[i];
        Module module = modulesArray[i];
        string moduleName = xmlModule.Name;
        string nameSpace = xmlModule.Namespace;
        HashSet<Symbol> symbols = new HashSet<Symbol>();

        foreach (var type in typeNames)
        {
            Symbol[] foundSymbols = module.FindGlobalTypeWildcard(type.NameWildcard);

            if (foundSymbols.Length == 0)
            {
                errorLogger.WriteLine("Symbol not found: {0}", type.Name);
            }
            else
            {
                foreach (Symbol symbol in foundSymbols)
                {
                    symbols.Add(symbol);
                }
            }

            if (type.ExportDependentTypes)
            {
                foreach (Symbol symbol in foundSymbols)
                {
                    symbol.ExtractDependentSymbols(symbols, xmlConfig.Transformations);
                }
            }
        }

        if (symbols.Count == 0)
        {
            foreach (Symbol symbol in module.GetAllTypes())
            {
                symbols.Add(symbol);
            }
        }

        globalTypesPerModule[i] = symbols.Where(t => t.Tag == CodeTypeTag.Class || t.Tag == CodeTypeTag.Structure || t.Tag == CodeTypeTag.Union || t.Tag == CodeTypeTag.Enum).ToArray();

        // Cache global scope
        if (generationOptions.HasFlag(UserTypeGenerationFlags.InitializeSymbolCaches))
        {
            var globalScope = module.GlobalScope;

            globalScope.InitializeCache();
        }
    });

    List<Symbol> allSymbols = globalTypesPerModule.SelectMany(ss => ss).ToList();

    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Initialize symbol fields and base classes
    if (generationOptions.HasFlag(UserTypeGenerationFlags.InitializeSymbolCaches))
    {
        logger.Write("Initializing symbol values...");
        Parallel.ForEach(Partitioner.Create(allSymbols), symbol => symbol.InitializeCache());
        logger.WriteLine(" {0}", stopwatch.Elapsed);
    }

    logger.Write("Deduplicating symbols...");

    // Group duplicated symbols
    Dictionary<string, List<Symbol>> symbolsByName = new Dictionary<string, List<Symbol>>();
    Dictionary<Symbol, List<Symbol>> duplicatedSymbols = new Dictionary<Symbol, List<Symbol>>();

    foreach (var symbol in allSymbols)
    {
        List<Symbol> symbols;

        if (!symbolsByName.TryGetValue(symbol.Name, out symbols))
        {
            symbolsByName.Add(symbol.Name, symbols = new List<Symbol>());
        }

        bool found = false;

        for (int i = 0; i < symbols.Count; i++)
        {
            Symbol s = symbols[i];

            if (s.Size != 0 && symbol.Size != 0 && s.Size != symbol.Size)
            {
#if DEBUG
                logger.WriteLine("{0}!{1} ({2}) {3}!{4} ({5})", s.Module.Name, s.Name, s.Size, symbol.Module.Name, symbol.Name, symbol.Size);
#endif
                continue;
            }

            if (s.Size == 0 && symbol.Size != 0)
            {
                // Prefer the sized symbol as the representative of the group.
                List<Symbol> duplicates;

                if (!duplicatedSymbols.TryGetValue(s, out duplicates))
                {
                    duplicatedSymbols.Add(s, duplicates = new List<Symbol>());
                }

                duplicatedSymbols.Remove(s);
                duplicates.Add(s);
                duplicatedSymbols.Add(symbol, duplicates);
                symbols.Remove(s);
                symbols.Add(symbol);
            }
            else
            {
                List<Symbol> duplicates;

                if (!duplicatedSymbols.TryGetValue(s, out duplicates))
                {
                    duplicatedSymbols.Add(s, duplicates = new List<Symbol>());
                }

                duplicates.Add(symbol);
            }

            found = true;
            break;
        }

        if (!found)
        {
            symbols.Add(symbol);
        }
    }

    // Unlink duplicated symbols if two or more are named the same
    foreach (var symbols in symbolsByName.Values)
    {
        if (symbols.Count <= 1)
        {
            continue;
        }

        for (int i = 0, n = symbols.Count; i < n; i++)
        {
            Symbol s = symbols[i];
            List<Symbol> duplicates;

            if (!duplicatedSymbols.TryGetValue(s, out duplicates))
            {
                continue;
            }

            symbols.AddRange(duplicates);
            duplicatedSymbols.Remove(s);
        }
    }

    // Extracting deduplicated symbols
    Dictionary<string, Symbol[]> deduplicatedSymbols = new Dictionary<string, Symbol[]>();
    Dictionary<Symbol, string> symbolNamespaces = new Dictionary<Symbol, string>();

    foreach (var symbols in symbolsByName.Values)
    {
        if (symbols.Count != 1 || modules.Count == 1)
        {
            foreach (var s in symbols)
            {
                symbolNamespaces.Add(s, modules[s.Module].Namespace);
            }
        }
        else
        {
            Symbol symbol = symbols.First();
            List<Symbol> duplicates;

            if (!duplicatedSymbols.TryGetValue(symbol, out duplicates))
            {
                duplicates = new List<Symbol>();
            }
            duplicates.Insert(0, symbol);
            deduplicatedSymbols.Add(symbol.Name, duplicates.ToArray());

            foreach (var s in duplicates)
            {
                symbolNamespaces.Add(s, xmlConfig.CommonTypesNamespace);
            }
        }
    }

    var globalTypes = symbolsByName.SelectMany(s => s.Value).ToArray();

    logger.WriteLine(" {0}", stopwatch.Elapsed);
    logger.WriteLine(" Total symbols: {0}", globalTypesPerModule.Sum(gt => gt.Length));
    logger.WriteLine(" Unique symbol names: {0}", symbolsByName.Count);
    logger.WriteLine(" Deduplicated symbols: {0}", globalTypes.Length);

    // Initialize GlobalCache with deduplicatedSymbols
    GlobalCache.Update(deduplicatedSymbols);

    // Collecting types
    logger.Write("Collecting types...");

    foreach (var module in modules.Keys)
    {
        userTypes.Add(userTypeFactory.AddSymbol(module.GlobalScope, null, modules[module].Namespace, generationOptions));
    }

    ConcurrentBag<Symbol> simpleSymbols = new ConcurrentBag<Symbol>();
    Dictionary<Tuple<string, string>, List<Symbol>> templateSymbols = new Dictionary<Tuple<string, string>, List<Symbol>>();

    Parallel.ForEach(Partitioner.Create(globalTypes), (symbol) =>
    {
        string symbolName = symbol.Name;

        // TODO: Add configurable filter
        //
        if (symbolName.StartsWith("$") || symbolName.StartsWith("__vc_attributes") || symbolName.Contains("`anonymous-namespace'") || symbolName.Contains("`anonymous namespace'") || symbolName.Contains("::$") || symbolName.Contains("`"))
        {
            return;
        }

        // Do not handle template referenced arguments
        if (symbolName.Contains("&"))
        {
            // TODO: Convert this to function pointer
            return;
        }

        // TODO: For now remove all unnamed-type symbols
        string scopedClassName = symbol.Namespaces.Last();

        if (scopedClassName.StartsWith("<") || symbolName.Contains("::<"))
        {
            return;
        }

        // Check if symbol contains template type.
        if ((symbol.Tag == CodeTypeTag.Class || symbol.Tag == CodeTypeTag.Structure || symbol.Tag == CodeTypeTag.Union) && SymbolNameHelper.ContainsTemplateType(symbolName))
        {
            List<string> namespaces = symbol.Namespaces;
            string className = namespaces.Last();
            var symbolId = Tuple.Create(symbolNamespaces[symbol], SymbolNameHelper.CreateLookupNameForSymbol(symbol));

            lock (templateSymbols)
            {
                if (!templateSymbols.ContainsKey(symbolId))
                {
                    templateSymbols[symbolId] = new List<Symbol>() { symbol };
                }
                else
                {
                    templateSymbols[symbolId].Add(symbol);
                }
            }

            // TODO:
            // Do not add physical types for template specialization (not now)
            // do if types contains static fields
            // nested in templates
        }
        else
        {
            simpleSymbols.Add(symbol);
        }
    });
    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Populate templates
    logger.Write("Populating templates...");
    foreach (List<Symbol> symbols in templateSymbols.Values)
    {
        Symbol symbol = symbols.First();

        userTypes.AddRange(userTypeFactory.AddTemplateSymbols(symbols, symbolNamespaces[symbol], generationOptions));
    }
    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Specialized classes
    logger.Write("Populating specialized classes...");
    foreach (Symbol symbol in simpleSymbols)
    {
        userTypes.Add(userTypeFactory.AddSymbol(symbol, null, symbolNamespaces[symbol], generationOptions));
    }
    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // To solve template dependencies, update specialization arguments once all the templates have been populated.
    logger.Write("Updating template arguments...");
    foreach (TemplateUserType templateUserType in userTypes.OfType<TemplateUserType>())
    {
        foreach (SpecializedTemplateUserType specializedTemplateUserType in templateUserType.Specializations)
        {
            if (!specializedTemplateUserType.UpdateTemplateArguments(userTypeFactory))
            {
#if DEBUG
                logger.WriteLine("Template user type cannot be updated: {0}", specializedTemplateUserType.Symbol.Name);
#endif
            }
        }
    }
    logger.WriteLine(" {0}", stopwatch.Elapsed);

    // Post processing user types (filling DeclaredInType)
    logger.Write("Post processing user types...");
    var namespaceTypes = userTypeFactory.ProcessTypes(userTypes, symbolNamespaces, xmlConfig.CommonTypesNamespace ?? modules.First().Key.Namespace).ToArray();

    userTypes.AddRange(namespaceTypes);

    logger.WriteLine(" {0}", stopwatch.Elapsed);
}
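// Both hot loops above pass a collection through the single-argument
// Partitioner.Create overload, which wraps the IEnumerable in a chunking
// partitioner: workers pull chunks on demand (chunk sizes grow as the loop
// runs) rather than receiving one fixed static split. That helps when items,
// like symbols with very different cache sizes, have uneven cost. A minimal
// sketch under that assumption; the workload and sizes are illustrative.
using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class ChunkingPartitionerDemo
{
    static void Main()
    {
        var items = Enumerable.Range(0, 1000).ToList();

        Parallel.ForEach(Partitioner.Create(items), item =>
        {
            // Every hundredth item is far more expensive, standing in for a
            // symbol with a large dependency cache.
            Thread.SpinWait(item % 100 == 0 ? 1_000_000 : 1_000);
        });

        Console.WriteLine("All items processed.");
    }
}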
static void Main()
{
    // Source must be array or IList.
    var source = Enumerable.Range(0, 100000).ToArray();
    double[] results = new double[source.Length];

    Console.WriteLine("Enter default partitioner code");

    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // One delegate invocation per element. The element value doubles as the
    // index here because source is Enumerable.Range(0, N).
    Parallel.ForEach(source, (j, loop) =>
    {
        results[j] = source[j] * Math.PI;
    });

    stopwatch.Stop();
    Console.Error.WriteLine("Default partitioner loop time in milliseconds: {0}", stopwatch.ElapsedMilliseconds);

    // Partition the entire source array into ranges.
    var rangePartitioner = Partitioner.Create(0, source.Length);

    Console.WriteLine("Enter partitioner code");
    stopwatch.Reset();
    stopwatch.Start();

    // Loop over the partitions in parallel.
    Parallel.ForEach(rangePartitioner, (range, loopState) =>
    {
        // Loop over each range element without a delegate invocation.
        for (int i = range.Item1; i < range.Item2; i++)
        {
            results[i] = source[i] * Math.PI;
        }
    });

    stopwatch.Stop();
    Console.Error.WriteLine("Partitioner loop time in milliseconds: {0}", stopwatch.ElapsedMilliseconds);

    // Partition the source array into ten fixed-size chunks.
    int size = source.Length / 10;
    var chunkPartitioner = Partitioner.Create(0, source.Length, size);

    Console.WriteLine("Enter defined chunk partitioner code");
    stopwatch.Reset();
    stopwatch.Start();

    // Loop over the partitions in parallel.
    Parallel.ForEach(chunkPartitioner, (range, loopState) =>
    {
        // Loop over each range element without a delegate invocation.
        for (int i = range.Item1; i < range.Item2; i++)
        {
            results[i] = source[i] * Math.PI;
        }
    });

    stopwatch.Stop();
    Console.Error.WriteLine("Defined Chunk Partitioner loop time in milliseconds: {0}", stopwatch.ElapsedMilliseconds);

    Console.ReadKey();
}
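// Parallel.ForEach also has an overload with per-partition local state and a
// delegate that runs once when each partition completes, which avoids touching
// shared state on every element. A minimal sketch of that overload; the
// Math.PI workload mirrors the benchmark above, and the merge step uses the
// standard Interlocked.CompareExchange retry loop for doubles.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

class LocalFinallyDemo
{
    static void Main()
    {
        var source = Enumerable.Range(0, 100000).ToArray();
        double total = 0;

        Parallel.ForEach(
            source,
            () => 0.0,                                                   // localInit: per-partition subtotal
            (value, loopState, subtotal) => subtotal + value * Math.PI,  // body: accumulate locally
            subtotal =>                                                  // localFinally: once per partition
            {
                // Merge the partition's subtotal into the shared total.
                double initial, computed;
                do
                {
                    initial = total;
                    computed = initial + subtotal;
                } while (initial != Interlocked.CompareExchange(ref total, computed, initial));
            });

        Console.WriteLine("Total: {0}", total);
    }
}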
public static void Main(string[] args) { Cons.WriteLine($"Start the LINQ Stuff.....{args?[0]}"); var champs = Formula1.GetChampions(); var q1 = from r in Formula1.GetChampions() where r.Country == "UK" orderby r.Wins descending select r; foreach (Racer r in q1) { Cons.WriteLine($"{r:A}"); } //Extension Method! Just another way to get at static classes! var s = "All"; s.Boots(); //Q1 using extensions methods to for show... var champs2 = new List <Racer>(Formula1.GetChampions()); IEnumerable <Racer> Brits = champs2.Where(c => c.Country == "UK").OrderBy(o => o.LastName) .Select(r => r); DeferredDemo.DeferredEx1(); //LINQ query examples var wins10plus = from r in champs2 where r.Wins >= 10 select r; var wins10Plus2 = Formula1.GetChampions().Where(r => r.Wins >= 10).Select(r => r).ToList(); //Example where one can't use LINQ and have to use the extension method. var r1 = champs2.Where((r, Index) => r.LastName.StartsWith("A") || Index % 2 != 0); //Type Filtering object[] data = { "One", 1, 2, 3, 4, "Five", "Six" }; var int1 = data.OfType <int>().OrderBy(n => n); foreach (var i in int1.Reverse()) { Cons.WriteLine($"{i}"); } //Compound froms var ferrariDrives = from r in Formula1.GetChampions() from c in r.Cars where c == "Ferrari" select r.LastName + " " + r.LastName + " => " + c; var Drives1950s = from r in Formula1.GetChampions() from y in r.Years where y >= 1950 && y <= 1959 orderby y select r.LastName + " " + r.LastName + " => " + y; //Using Exstensions var Drivers1960s = Formula1.GetChampions().SelectMany(r => r.Years, (r, y) => new { Racer = r, Year = y }) .Where(r => r.Year >= 1960 && r.Year <= 1969).OrderBy(r => r.Year).Select(r => r.Racer.LastName + " " + r.Racer.FirstName + " " + r.Year); //OrderBy and Take... var oRacers = (from r in Formula1.GetChampions() orderby r.Country, r.LastName, r.FirstName descending select r).Take(10); //And With Exrensions oRacers = Formula1.GetChampions().OrderBy(r => r.Wins).ThenBy(r => r.LastName) .ThenByDescending(r => r.FirstName).Take(10); //Now some grouping var gRacers = from r in Formula1.GetChampions() group r by r.Country into c orderby c.Key select c; var gRacers2 = from r in Formula1.GetChampions() group r by r.Country into c orderby c.Key, c.Count() descending where c.Count() >= 2 select new { Country = c.Key, Count = c.Count() }; //Grouping with Nested Objects basicaly c from gRacers var gRacers3 = from r in Formula1.GetChampions() group r by r.Country into c orderby c.Key, c.Count() descending where c.Count() >= 2 select new { Country = c.Key, Count = c.Count(), Racers = from r2 in c orderby r2.Wins select new { r2.LastName, r2.Wins } }; //Inner Joins var ijRacers = from r in Formula1.GetChampions() from y in r.Years select new { Year = y, Name = r.LastName + ", " + r.FirstName }; var ijTeams = from t in F1_Teams.GetConstructorChampions() from y in t.Years select new { Year = y, Name = t.Name }; var ijRacerAndTeam = (from r in ijRacers join t in ijTeams on r.Year equals t.Year orderby r.Year select new { r.Year, Champion = r.Name, Constructor = t.Name } ).Take(10); var loRacerAndTeam = (from r in ijRacers join t in ijTeams on r.Year equals t.Year into rt from a in rt.DefaultIfEmpty() orderby r.Year select new { r.Year, Champion = r.Name, Constructor = a == null ? "Not One": a.Name } ).Take(10); //Group Join Step 1 flattern! 
var gjRacers = Formula1.GetChampionships().SelectMany(c => new List <RacerInfo>() { new RacerInfo { Year = c.Year, Position = 1, FirstName = c.Champion.FirstName(), LastName = c.Champion.LastName() }, new RacerInfo { Year = c.Year, Position = 2, FirstName = c.Second.FirstName(), LastName = c.Second.LastName() }, new RacerInfo { Year = c.Year, Position = 3, FirstName = c.Third.FirstName(), LastName = c.Third.LastName() } }).ToArray(); //Step 2 Join var q = (from r in Formula1.GetChampions() join ts in gjRacers on new { FirstName = r.FirstName, LastName = r.LastName } equals new { FirstName = ts.FirstName, LastName = ts.LastName } into yrResults select new { FirstName = r.FirstName, LastName = r.LastName, Wins = r.Wins, Starts = r.Starts, Results = yrResults } ).ToArray(); //Using Set Operators //Using a static method var ferrariDrivers = GetDrives("Ferrari"); var mcLarenDrivers = GetDrives("McLaren"); //Better way: use delegates! Func <string, IEnumerable <Racer> > racersByCar = car => from r in Formula1.GetChampions() from c in r.Cars where c == car orderby r.LastName select r; foreach (var r in racersByCar("Ferrari").Intersect(racersByCar("McLaren"))) { Cons.WriteLine(r.LastName); } //Zip method! var rNames = from r in Formula1.GetChampions() where r.Country == "Italy" orderby r.Wins descending select new { Name = r.FirstName + "," + r.LastName }; var rNameStarts = from r in Formula1.GetChampions() where r.Country == "Italy" orderby r.Wins descending select new { LastName = r.LastName, Starts = r.Starts }; var zRacers = rNames.Zip(rNameStarts, (a, b) => a.Name + ", Starts: " + b.Starts); foreach (var a in zRacers) { Cons.WriteLine(a.ToString()); } //Why the selections and order are so important! rNameStarts = from r in Formula1.GetChampions() where r.Country == "UK" orderby r.Wins select new { LastName = r.LastName, Starts = r.Starts }; zRacers = rNames.Zip(rNameStarts, (a, b) => a.Name + ", Starts: " + b.Starts); foreach (var a in zRacers) { Cons.WriteLine(a.ToString()); } //Complete mismatch lol, the sequences no longer line up //Partitioning int pSize = 20; int nPages = (int)Math.Ceiling(Formula1.GetChampions().Count / (double)pSize); for (int p = 0; p < nPages; p++) { Cons.WriteLine($"Page {p+1} of {nPages}:"); var pR = (from r in Formula1.GetChampions() orderby r.LastName, r.FirstName select r.LastName + ", " + r.FirstName).Skip(p * pSize).Take(pSize); foreach (var r in pR) { Cons.WriteLine(r); } Cons.ReadKey(); } //Aggregate Operators: Count, Sum, Min, Max, Average and Aggregate return single values. //Count var aQ1 = from r in Formula1.GetChampions() let nYears = r.Years.Count() orderby nYears descending, r.LastName, r.FirstName select new { Name = r.LastName, Initials = r.FirstName.Initials(), Years = nYears }; foreach (var r in aQ1) { Cons.WriteLine($"{r.Name}, {r.Initials} Years = {r.Years}"); } //Sum var t1 = from c in from r in Formula1.GetChampions() group r by r.Country select new { Key1 = c.Key }; var aS1 = (from c in from r in Formula1.GetChampions() group r by r.Country into c select new { Country = c.Key, Wins = (from r2 in c select r2.Wins).Sum() } orderby c.Wins descending, c.Country select c).Take(5); //Conversion Operators var lRacers = (from r in Formula1.GetChampions() from c in r.Cars select new { Car = c, Racer = r } ).ToLookup(cr => cr.Car, cr => cr.Racer); var lr = lRacers["Williams"]; //Gives us a lookup of just Williams drivers //Using the Cast method! var oList = new System.Collections.ArrayList(Formula1.GetChampions() as System.Collections.ICollection ??
throw new InvalidOperationException()); var q2 = from r in oList.Cast <Racer>() where r.Country == "USA" select r; //Not sure what this is doing but it must be good!!!! //Generation Operators var vRange = Enumerable.Range(1, 20); var vRange2 = Enumerable.Range(1, 20).Select(i => i > 1? (i > 2? i * 2: i): i); //Parallel LINQ Cons.WriteLine($"Loading large dataset, please wait."); var lsData = LargeSample().ToList(); var watch = new Stopwatch(); watch.Start(); var pRes = (from x in lsData.AsParallel() where Math.Log(x) < 4 select x ).Average(); watch.Stop(); Cons.WriteLine($"Parallel run: {watch.Elapsed}"); watch.Reset(); watch.Start(); var nRes = (from x in lsData where Math.Log(x) < 4 select x ).Average(); watch.Stop(); Cons.WriteLine($"Normal run: {watch.Elapsed}"); watch.Reset(); watch.Start(); var mpRes = lsData.AsParallel().Where(x => Math.Log(x) < 4).Select(x => x).Average(); watch.Stop(); Cons.WriteLine($"Working Average with Parallel run: {watch.Elapsed}"); //Parallel LINQ //Partitions watch.Reset(); watch.Start(); var ppRes = (from x in Partitioner.Create(lsData, true).AsParallel().WithDegreeOfParallelism(4) where Math.Log(x) < 4 select x ).Average(); watch.Stop(); Cons.WriteLine($"Average with Degree of Parallelism set to 4: {watch.Elapsed}"); watch.Reset(); watch.Start(); var ppRes2 = (from x in Partitioner.Create(lsData, true).AsParallel().WithDegreeOfParallelism(8) where Math.Log(x) < 4 select x ).Average(); watch.Stop(); Cons.WriteLine($"Average with Degree of Parallelism set to 8: {watch.Elapsed}"); watch.Reset(); watch.Start(); //Partitions // Cancellations and how to do them.... var cts = new CancellationTokenSource(); //From using System.Threading; Task.Factory.StartNew(() => { try { Cons.WriteLine(); Cons.WriteLine("Cancellable Query Started!"); var cRes = (from x in lsData.AsParallel().WithCancellation(cts.Token) where Math.Log(x) < 4 select x).Average(); Cons.WriteLine($"Query not cancelled, result: {cRes}"); } catch (OperationCanceledException cex) { Cons.WriteLine(); Cons.Write($"Cancel message, {cex.Message}"); } }); Cons.WriteLine("Cancel Query!"); cts.Cancel(); //Cons.Write($"Cancel ?"); //string input = Cons.ReadLine(); //if (input.ToLower().Equals("y")) // cts.Cancel(); // Cancellations and how to do them.... //Expression Trees //Expression Trees Cons.Write($"Press any key to finish and close."); Cons.ReadKey(); }
/// <summary> /// Selects an item (such as Max or Min). /// </summary> /// <param name="fromInclusive">Starting index of the loop.</param> /// <param name="toExclusive">Ending index of the loop</param> /// <param name="select">The function to select items over a subset.</param> /// <param name="reduce">The function to select the item of selection from the subsets.</param> /// <returns>The selected value.</returns> public static T Aggregate <T>(int fromInclusive, int toExclusive, Func <int, T> select, Func <T[], T> reduce) { if (select == null) { throw new ArgumentNullException(nameof(select)); } if (reduce == null) { throw new ArgumentNullException(nameof(reduce)); } // Special case: no action if (fromInclusive >= toExclusive) { return(reduce(Array.Empty <T>())); } // Special case: single action, inline if (fromInclusive == (toExclusive - 1)) { return(reduce(new[] { select(fromInclusive) })); } // Special case: straight execution without parallelism if (Control.MaxDegreeOfParallelism < 2) { var mapped = new T[toExclusive - fromInclusive]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k + fromInclusive); } return(reduce(mapped)); } // Common case var intermediateResults = new List <T>(); var syncLock = new object(); Parallel.ForEach( Partitioner.Create(fromInclusive, toExclusive), CreateParallelOptions(), () => new List <T>(), (range, _, localData) => { var mapped = new T[range.Item2 - range.Item1]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k + range.Item1); } localData.Add(reduce(mapped)); return(localData); }, localResult => { lock (syncLock) { intermediateResults.Add(reduce(localResult.ToArray())); } }); return(reduce(intermediateResults.ToArray())); }
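// Hedged usage sketch for the Aggregate helper above (values and lambdas are illustrative; assumes
// using System and System.Linq): a parallel max of f(i) over [0, n). Each partition maps its index
// range with select and pre-reduces it, so reduce only sees one value per partition plus a final merge.
double maxSine = Aggregate(
    0, 1000000,
    i => Math.Sin(i), // select: map an index to a value
    partials => partials.Length == 0 ? double.NegativeInfinity : partials.Max()); // reduce must tolerate an empty array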
public static void WatVap_calculate(int NI, int NJ, int NK) { //computation of temperature change due to latent heat int range_parallel = (int)(NI / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2); range_parallel = Math.Max(30 - (ITIME % 3) * 2, range_parallel); // min. 30 steps per processor range_parallel = Math.Min(NI, range_parallel); // if NI < range_parallel //computation of the new specific humidity //Parallel.For(2, NI, Program.pOptions, i => Parallel.ForEach(Partitioner.Create(2, NI, range_parallel), range => { for (int i = range.Item1; i < range.Item2; i++) { for (int j = 2; j <= NJ - 1; j++) { double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; double[] TABS_L = Program.TABS[i][j]; double[] QUN_L = Program.QUN[i][j]; double[] TN_L = Program.TN[i][j]; float[] FACTOR_L = Program.FACTOR[i][j]; double WAT_VAP_integral = 0.0; for (int k = 1; k < NK; k++) { //saturation vapour pressure - Magnus formulae double EGSAT = 611.2 * Math.Exp(17.269 * (TABS_L[k] - 273.16) / (TABS_L[k] - 30.04)); //maximum water vapour content possible -> assuming condensation above 80% relative humidity double QGSAT = EGSAT / 461.5 / TABS_L[k]; //change in temperature if (QUN_L[k] > QGSAT * 1000.0) { //condensation double WATVAP_COND = (QUN_L[k] - QGSAT * 1000.0); TN_L[k] += WATVAP_COND * 2.5 * FACTOR_L[k]; WAT_VAP_L[k] += WATVAP_COND; QUN_L[k] -= WATVAP_COND; } else { //evaporation double WATVAP_EVA = Math.Min(WAT_VAP_L[k], QGSAT * 1000.0 - QUN_L[k]); TN_L[k] -= WATVAP_EVA * 2.5 * FACTOR_L[k]; WAT_VAP_L[k] -= WATVAP_EVA; WAT_VAP_L[k] = Math.Max(WAT_VAP_L[k], 0.0); QUN_L[k] += WATVAP_EVA; } WAT_VAP_integral += WAT_VAP_L[k]; } //Program.CLOUDS[i][j] = 0; if (WAT_VAP_integral > 1.0) { //Program.CLOUDS[i][j] = 1; } } } }); range_parallel = (int)(NI / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2); range_parallel = Math.Max(30 - (ITIME % 3) * 2, range_parallel); // min. 
// 30 steps per processor
range_parallel = Math.Min(NI, range_parallel); // if NI < range_parallel //computation of the new specific humidity //Parallel.For(2, NI, Program.pOptions, i => Parallel.ForEach(Partitioner.Create(2, NI, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int i = range.Item1; i < range.Item2; i++) { for (int j = 2; j <= NJ - 1; j++) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NI / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2); range_parallel = Math.Max(30 - (ITIME % 3) * 2, range_parallel); // min. 30 steps per processor range_parallel = Math.Min(NI, range_parallel); // if NI < range_parallel //Parallel.For(2, NI, Program.pOptions, i1 => Parallel.ForEach(Partitioner.Create(2, NI, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int i1 = range.Item1; i1 < range.Item2; i1++) { int i = NI - i1 + 1; for (int j = NJ - 1; j >= 2; j--) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NI / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2 + 6); range_parallel = Math.Max(36 - (ITIME % 3) * 2, range_parallel); // min. 36 steps per processor
range_parallel = Math.Min(NI, range_parallel); // if NI < range_parallel //Parallel.For(2, NI, Program.pOptions, i1 => Parallel.ForEach(Partitioner.Create(2, NI, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int i1 = range.Item1; i1 < range.Item2; i1++) { int i = NI - i1 + 1; for (int j = 2; j <= NJ - 1; j++) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NI / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2 + 3); range_parallel = Math.Max(33 - (ITIME % 3) * 2, range_parallel); // min. 33 steps per processor range_parallel = Math.Min(NI, range_parallel); // if NI < range_parallel //Parallel.For(2, NI, Program.pOptions, i => Parallel.ForEach(Partitioner.Create(2, NI, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int i = range.Item1; i < range.Item2; i++) { for (int j = NJ - 1; j >= 2; j--) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NJ / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2); range_parallel = Math.Max(30 - (ITIME % 3) * 2, range_parallel); // min. 30 steps per processor
range_parallel = Math.Min(NJ, range_parallel); // if NJ < range_parallel //Parallel.For(2, NJ, Program.pOptions, j => Parallel.ForEach(Partitioner.Create(2, NJ, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int j = range.Item1; j < range.Item2; j++) { for (int i = 2; i <= NI - 1; i++) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NJ / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2); range_parallel = Math.Max(30 - (ITIME % 3) * 2, range_parallel); // min. 30 steps per processor range_parallel = Math.Min(NJ, range_parallel); // if NJ < range_parallel //Parallel.For(2, NJ, Program.pOptions, j1 => Parallel.ForEach(Partitioner.Create(2, NJ, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int j1 = range.Item1; j1 < range.Item2; j1++) { int j = NJ - j1 + 1; for (int i = NI - 1; i >= 2; i--) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NJ / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2); range_parallel = Math.Max(36 - (ITIME % 3) * 2, range_parallel); // min. 36 steps per processor
range_parallel = Math.Min(NJ, range_parallel); // if NJ < range_parallel //Parallel.For(2, NJ, Program.pOptions, j => Parallel.ForEach(Partitioner.Create(2, NJ, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int j = range.Item1; j < range.Item2; j++) { for (int i = NI - 1; i >= 2; i--) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); range_parallel = (int)(NJ / Program.pOptions.MaxDegreeOfParallelism - (ITIME % 3) * 2 + 6); range_parallel = Math.Max(36 - (ITIME % 3) * 2, range_parallel); // min. 36 steps per processor range_parallel = Math.Min(NJ, range_parallel); // if NJ < range_parallel //Parallel.For(2, NJ, Program.pOptions, j1 => Parallel.ForEach(Partitioner.Create(2, NJ, range_parallel), range => { double DIM; double[] PIM = new double[NK + 1]; double[] QIM = new double[NK + 1]; double help; for (int j1 = range.Item1; j1 < range.Item2; j1++) { int j = NJ - j1 + 1; for (int i = 2; i <= NI - 1; i++) { float[] AWEST_PS_L = Program.AWEST_PS[i][j]; float[] ASOUTH_PS_L = Program.ASOUTH_PS[i][j]; float[] AEAST_PS_L = Program.AEAST_PS[i][j]; float[] ANORTH_PS_L = Program.ANORTH_PS[i][j]; float[] AP0_PS_L = Program.AP0_PS[i][j]; double[] A_PS_L = Program.A_PS[i][j]; double[] B_PS_L = Program.B_PS[i][j]; double[] C_PS_L = Program.C_PS[i][j]; double[] WAT_VAPN_L = Program.WAT_VAPN[i][j]; double[] WAT_VAP_L = Program.WAT_VAP[i][j]; for (int k = 1; k <= NK - 1; k++) { DIM = AWEST_PS_L[k] * Program.WAT_VAPN[i - 1][j][k] + ASOUTH_PS_L[k] * Program.WAT_VAPN[i][j - 1][k] + AEAST_PS_L[k] * Program.WAT_VAPN[i + 1][j][k] + ANORTH_PS_L[k] * Program.WAT_VAPN[i][j + 1][k] + AP0_PS_L[k] * WAT_VAP_L[k]; //Recurrence formula if (k > 1) { help = 1 / (A_PS_L[k] - C_PS_L[k] * PIM[k - 1]); PIM[k] = B_PS_L[k] * help; QIM[k] = (DIM + C_PS_L[k] * QIM[k - 1]) * help; } else { help = 1 / A_PS_L[k]; PIM[k] = B_PS_L[k] * help; QIM[k] = DIM * help; } } //Obtain new Water vapour-components for (int k = NK - 1; k >= 1; k--) { help = WAT_VAPN_L[k]; help += (Program.RELAXT * (PIM[k] * WAT_VAPN_L[k + 1] + QIM[k] - help)); WAT_VAPN_L[k] = help; } } } }); }
internal static void Seed(CrutchContext context) { Trace.WriteLine("Performing database initialization"); var mainAccount = new Account { Name = "Entity" }; var otherAccount = new Account { Name = "Framework" }; var folders = new List <MessageFolder>(FoldersCount); var allThreads = new List <MessageThread>(ThreadsInEachFolder * FoldersCount); for (int i = 0; i < FoldersCount; i++) { var folder = new MessageFolder { Owner = mainAccount, Name = "Folder" + i, Threads = new List <MessageThread>(ThreadsInEachFolder) }; for (int j = 0; j < ThreadsInEachFolder; j++) { var thread = new MessageThread { Subject = string.Format("Thread {0} - {1}", i, j) }; folder.Threads.Add(thread); allThreads.Add(thread); } folders.Add(folder); } var messages = new List <Message>(MessagesCount); for (int i = 0; i < MessagesCount; i++) { messages.Add(new Message { Owner = mainAccount, Sender = otherAccount, Receiver = mainAccount, Date = DateTime.UtcNow, IsRead = false, Thread = allThreads[i % allThreads.Count], Text = "Message" + i }); } context.WithoutValidation().WithoutChangesDetection(); context.Accounts.Add(mainAccount); context.Accounts.Add(otherAccount); Trace.WriteLine("Adding folders"); for (int i = 0; i < FoldersCount; i++) { context.Folders.Add(folders[i]); context.SaveChanges(); } Parallel.ForEach(Partitioner.Create(0, MessagesCount, 100), range => { using (var tempContext = EfContextFactory.CreateContext().WithoutChangesDetection().WithoutValidation()) { tempContext.Entry(mainAccount).State = System.Data.EntityState.Unchanged; tempContext.Entry(otherAccount).State = System.Data.EntityState.Unchanged; for (int i = range.Item1; i < range.Item2; i++) { tempContext.Entry(messages[i].Thread).State = System.Data.EntityState.Unchanged; tempContext.Messages.Add(messages[i]); } tempContext.SaveChanges(); } Trace.WriteLine(string.Format("Adding messages {0} - {1}", range.Item1, range.Item2)); }); context.SaveChanges(); GC.Collect(2, GCCollectionMode.Forced, true); }
public void Render(int width, int height, CancellationTokenSource tokenSource) { var stopwatch = new Stopwatch(); stopwatch.Start(); // reinit buffered values Buffer = new int[width, height]; double yMin = _position.YMin; double xMin = _position.XMin; double stepX = _position.XDiff / width; double stepY = _position.YDiff / height; int maxIterations = _maxIterations; int indexMaxIterations = _colorMap.IndexMaxIterations; int colorMapLength = _colorMap.Colors.Length; // local copy of color map int[] localColorMap = new int[colorMapLength]; Array.Copy(_colorMap.Colors, localColorMap, colorMapLength); var options = new ParallelOptions { CancellationToken = tokenSource.Token }; // start rendering from the center var midToTop = RangeMinMaxInclusive(0, GetIndex(height, 0.5, true)).Reverse(); var midToBot = RangeMinMaxInclusive(GetIndex(height, 0.5, false), height - 1); var source = midToTop.Concat(midToBot).ToArray(); OrderablePartitioner <int> partitioner = Partitioner.Create(source, true); try { ParallelLoopResult unused = Parallel.ForEach(partitioner, options, () => 0, (y, state, threadLocal) => { double zRe, zIm, zReSq, zImSq, zReTmp; int iterations; // imaginary axis step double cIm = yMin + y * stepY; // real axis double cRe = xMin; for (var x = 0; x < width; x++) { // reset the iteration count and z zRe = 0; zIm = 0; iterations = 0; zReSq = zRe * zRe; zImSq = zIm * zIm; // iterate until the max number of iterations is reached // or the value's magnitude exceeds 2 while (iterations < maxIterations && zReSq + zImSq < 4) { // z = z^2 + c zReTmp = zRe; zRe = zReSq - zImSq + cRe; zIm = zReTmp * zIm; zIm = zIm + zIm + cIm; zReSq = zRe * zRe; zImSq = zIm * zIm; iterations++; } if (iterations < maxIterations) { // smooth coloring algorithm double zLog = Math.Log(zReSq + zImSq) / 2; var nu = (int)(Math.Log(zLog * ONE_OVER_LOG_TWO) * ONE_OVER_LOG_TWO); iterations = iterations + 1 - nu; // directly write to pixel buffer _fastImage.SetPixel(_pixelBufferPtr, x, y, localColorMap[iterations % colorMapLength]); } else { iterations = Int32.MaxValue; // write a max iteration pixel _fastImage.SetPixel(_pixelBufferPtr, x, y, localColorMap[indexMaxIterations]); } // save for further access Buffer[x, y] = iterations; // real axis step cRe += stepX; } return(threadLocal); }, threadLocal => { _fastImage.Dirty(); Trace.WriteLine("=> Dirty requested."); }); } catch (OperationCanceledException) { Trace.WriteLine("=> ## Aborted calculation."); } stopwatch.Stop(); _fastImage.Dirty(); Trace.WriteLine($"=> Calculation took {stopwatch.ElapsedMilliseconds} milliseconds."); }
static void Main(string[] args) { Console.WriteLine("Press any key to start"); Console.ReadKey(); Stopwatch sw = new Stopwatch(); long total = 0; ResetGC(); #region Synchronous Parallel.Invoke(() => { sw.Start(); for (int i = 0; i < ITERATIONS; i++) { total += i; } sw.Stop(); }); Console.WriteLine("Synchronous: \r\n\tduration = {0:N0}, \ttotal = {1}", sw.ElapsedMilliseconds, total); #endregion // Synchronous total = 0; ResetGC(); #region Parallel.For // subtotal with a delegate for each iteration sw.Restart(); Parallel.For(0, ITERATIONS, () => 0L, (i, state, local) => local + i, local => Interlocked.Add(ref total, local)); // localFinally runs concurrently, so a bare += would race sw.Stop(); Console.WriteLine("Parallel.For: \r\n\tduration = {0:N0},\ttotal = {1}", sw.ElapsedMilliseconds, total); #endregion // Parallel.For total = 0; ResetGC(); #region Partitioner // subtotal with a delegate for each core sw.Restart(); var partitioner = Partitioner.Create(0, ITERATIONS, ITERATIONS / Environment.ProcessorCount); Parallel.ForEach(partitioner, tuple => { long localTotal = 0; for (int i = tuple.Item1; i < tuple.Item2; i++) { localTotal += i; } lock (partitioner) { total += localTotal; } }); sw.Stop(); Console.WriteLine("Partitioner: \r\n\tduration = {0:N0}, \ttotal = {1}", sw.ElapsedMilliseconds, total); #endregion // Partitioner //Console.ReadKey(); }
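// Hedged variant of the Partitioner region above (same ITERATIONS constant assumed, using
// System.Threading): Interlocked.Add replaces the lock, and nothing is locked on the partitioner
// instance itself, which is the safer habit since other code could take the same lock object.
long total2 = 0;
var partitioner2 = Partitioner.Create(0, ITERATIONS, ITERATIONS / Environment.ProcessorCount);
Parallel.ForEach(partitioner2, tuple =>
{
    long localTotal = 0;
    for (int i = tuple.Item1; i < tuple.Item2; i++) { localTotal += i; }
    Interlocked.Add(ref total2, localTotal); // one atomic merge per range, no lock object needed
});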
internal static async Task DownloadImdbMetadataAsync(string directory, int level = 2, bool overwrite = false, bool useCache = false, bool useBrowser = false, bool isTV = false, Action <string>?log = null) { log ??= TraceLog; string[] movies = EnumerateDirectories(directory, level).ToArray(); Task[] tasks = Partitioner .Create(movies) .GetOrderablePartitions(IOMaxDegreeOfParallelism) .Select((partition, partitionIndex) => Task.Run(async() => { IWebDriver?webDriver = useBrowser ? WebDriverHelper.Start(partitionIndex) : null; try { if (webDriver is not null) { webDriver.Url = "https://www.imdb.com/"; } await partition.ForEachAsync(async movieWithIndex => { (long index, string movie) = movieWithIndex; string[] files = Directory.GetFiles(movie, PathHelper.AllSearchPattern, SearchOption.TopDirectoryOnly); string[] jsonFiles = files.Where(file => file.EndsWith(JsonMetadataExtension, StringComparison.OrdinalIgnoreCase)).ToArray(); if (jsonFiles.Any()) { if (overwrite) { jsonFiles.ForEach(jsonFile => { log($"Delete imdb metadata {jsonFile}."); File.Delete(jsonFile); }); } else { log($"Skip {movie}."); return; } } string?nfo = files.FirstOrDefault(file => file.EndsWith(XmlMetadataExtension, StringComparison.OrdinalIgnoreCase)); if (string.IsNullOrWhiteSpace(nfo)) { log($"!Missing metadata {movie}."); return; } string imdbId = XDocument.Load(nfo).Root?.Element((isTV ? "imdb_id" : "imdbid") !)?.Value ?? NotExistingFlag; string imdbFile = Path.Combine(movie, $"{imdbId}{ImdbCacheExtension}"); string parentFile = Path.Combine(movie, $"{imdbId}.Parent{ImdbCacheExtension}"); string releaseFile = Path.Combine(movie, $"{imdbId}.Release{ImdbCacheExtension}"); if (string.Equals(imdbId, NotExistingFlag)) { if (!files.Any(file => string.Equals(file, imdbFile, StringComparison.OrdinalIgnoreCase))) { await File.WriteAllTextAsync(Path.Combine(movie, imdbFile), string.Empty); } if (!files.Any(file => string.Equals(file, releaseFile, StringComparison.OrdinalIgnoreCase))) { await File.WriteAllTextAsync(Path.Combine(movie, releaseFile), string.Empty); } string emptyMetadataFile = Path.Combine(movie, $"{NotExistingFlag}{JsonMetadataExtension}"); if (!files.Any(file => string.Equals(file, emptyMetadataFile, StringComparison.OrdinalIgnoreCase))) { await File.WriteAllTextAsync(emptyMetadataFile, "{}"); } return; } log($"{index} Start {movie}"); (string imdbUrl, string imdbHtml, string parentUrl, string parentHtml, string releaseUrl, string releaseHtml, ImdbMetadata imdbMetadata) = await Imdb.DownloadAsync( imdbId, useCache, useCache ? imdbFile : string.Empty, useCache ? parentFile : string.Empty, useCache ? 
releaseFile : string.Empty, webDriver); Debug.Assert(!string.IsNullOrWhiteSpace(imdbHtml)); if (!imdbMetadata.Regions.Any()) { log($"!Location is missing for {imdbId}: {movie}"); } if (!useCache || !files.Any(file => string.Equals(file, imdbFile, StringComparison.OrdinalIgnoreCase))) { log($"Downloaded {imdbUrl} to {imdbFile}."); await File.WriteAllTextAsync(imdbFile, imdbHtml); log($"Saved to {imdbFile}."); } if (!useCache || !files.Any(file => string.Equals(file, releaseFile, StringComparison.OrdinalIgnoreCase))) { log($"Downloaded {releaseUrl} to {releaseFile}."); await File.WriteAllTextAsync(releaseFile, releaseHtml); log($"Saved to {releaseFile}."); } if (!string.IsNullOrWhiteSpace(parentUrl) && (!useCache || !files.Any(file => string.Equals(file, parentFile, StringComparison.OrdinalIgnoreCase)))) { log($"Downloaded {parentUrl} to {parentFile}."); await File.WriteAllTextAsync(parentFile, parentHtml); log($"Saved to {parentFile}."); } string jsonFile = Path.Combine(movie, $"{imdbId}.{imdbMetadata.Year}.{string.Join(",", imdbMetadata.Regions.Take(5))}.{string.Join(",", imdbMetadata.Languages.Take(3))}{JsonMetadataExtension}"); log($"Merged {imdbUrl} and {releaseUrl} to {jsonFile}."); string jsonContent = JsonSerializer.Serialize( imdbMetadata, new JsonSerializerOptions() { WriteIndented = true, PropertyNamingPolicy = JsonNamingPolicy.CamelCase, Encoder = JavaScriptEncoder.Create(UnicodeRanges.All) }); await File.WriteAllTextAsync(jsonFile, jsonContent); log($"Saved to {jsonFile}."); }); } finally { webDriver?.Dispose(); } })) .ToArray(); await Task.WhenAll(tasks); }
public float DoAllMoves(Playfield playf) { print = playf.print; this.isLethalCheck = playf.isLethalCheck; enoughCalculations = false; botBase = Ai.Instance.botBase; this.posmoves.Clear(); this.twoturnfields.Clear(); this.addToPosmoves(playf); bool havedonesomething = true; List <Playfield> temp = new List <Playfield>(); int deep = 0; this.calculated = 0; Playfield bestold = null; bestoldval = -20000000; while (havedonesomething) { if (this.printNormalstuff) { LogHelper.WriteCombatLog("ailoop"); } GC.Collect(); temp.Clear(); temp.AddRange(this.posmoves); this.posmoves.Clear(); havedonesomething = false; threadnumberGlobal = 0; if (print) { startEnemyTurnSimThread(temp, 0, temp.Count); } else { Parallel.ForEach(Partitioner.Create(0, temp.Count), range => { startEnemyTurnSimThread(temp, range.Item1, range.Item2); }); } foreach (Playfield p in temp) { if (this.totalboards > 0) { this.calculated += p.nextPlayfields.Count; } if (this.calculated <= this.totalboards) { this.posmoves.AddRange(p.nextPlayfields); p.nextPlayfields.Clear(); } //get the best Playfield float pVal = botBase.getPlayfieldValue(p); if (pVal > bestoldval) { bestoldval = pVal; bestold = p; bestoldDuplicates.Clear(); } else if (pVal == bestoldval) { bestoldDuplicates.Add(p); } } if (isLethalCheck && bestoldval >= 10000) { this.posmoves.Clear(); } if (this.posmoves.Count > 0) { havedonesomething = true; } if (this.printNormalstuff) { int donec = 0; foreach (Playfield p in posmoves) { if (p.complete) { donec++; } } LogHelper.WriteCombatLog("deep " + deep + " len " + this.posmoves.Count + " dones " + donec); } cuttingposibilities(isLethalCheck); if (this.printNormalstuff) { LogHelper.WriteCombatLog("cut to len " + this.posmoves.Count); } deep++; temp.Clear(); if (this.calculated > this.totalboards) { enoughCalculations = true; } if (deep >= this.maxdeep) { enoughCalculations = true; } } if (this.dirtyTwoTurnSim > 0 && !twoturnfields.Contains(bestold)) { twoturnfields.Add(bestold); } this.posmoves.Clear(); this.posmoves.Add(bestold); this.posmoves.AddRange(bestoldDuplicates); // search the best play........................................................... //do dirtytwoturnsim first :D if (!isLethalCheck && bestoldval < 10000) { doDirtyTwoTurnsim(); } if (posmoves.Count >= 1) { posmoves.Sort((a, b) => botBase.getPlayfieldValue(b).CompareTo(botBase.getPlayfieldValue(a))); Playfield bestplay = posmoves[0]; float bestval = botBase.getPlayfieldValue(bestplay); int pcount = posmoves.Count; for (int i = 1; i < pcount; i++) { float val = botBase.getPlayfieldValue(posmoves[i]); if (bestval > val) { break; } if (posmoves[i].cardsPlayedThisTurn > bestplay.cardsPlayedThisTurn) { continue; } else if (posmoves[i].cardsPlayedThisTurn == bestplay.cardsPlayedThisTurn) { if (bestplay.optionsPlayedThisTurn > posmoves[i].optionsPlayedThisTurn) { continue; } else if (bestplay.optionsPlayedThisTurn == posmoves[i].optionsPlayedThisTurn && bestplay.enemyHero.HealthPoints <= posmoves[i].enemyHero.HealthPoints) { continue; } } bestplay = posmoves[i]; bestval = val; } this.bestmove = bestplay.getNextAction(); this.bestmoveValue = bestval; this.bestboard = new Playfield(bestplay); this.bestboard.guessingHeroHP = bestplay.guessingHeroHP; this.bestboard.value = bestplay.value; this.bestboard.hashcode = bestplay.hashcode; bestoldDuplicates.Clear(); return(bestval); } this.bestmove = null; this.bestmoveValue = -100000; this.bestboard = playf; return(-10000); }
/// <summary> /// Selects an item (such as Max or Min). /// </summary> /// <param name="array">The array to iterate over.</param> /// <param name="select">The function to select items over a subset.</param> /// <param name="reduce">The function to select the item of selection from the subsets.</param> /// <returns>The selected value.</returns> public static TOut Aggregate <T, TOut>(T[] array, Func <int, T, TOut> select, Func <TOut[], TOut> reduce) { if (select == null) { throw new ArgumentNullException(nameof(select)); } if (reduce == null) { throw new ArgumentNullException(nameof(reduce)); } // Special case: no action if (array == null || array.Length == 0) { return(reduce(Array.Empty <TOut>())); } // Special case: single action, inline if (array.Length == 1) { return(reduce(new[] { select(0, array[0]) })); } // Special case: straight execution without parallelism if (Control.MaxDegreeOfParallelism < 2) { var mapped = new TOut[array.Length]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k, array[k]); } return(reduce(mapped)); } // Common case var intermediateResults = new List <TOut>(); var syncLock = new object(); Parallel.ForEach( Partitioner.Create(0, array.Length), CreateParallelOptions(), () => new List <TOut>(), (range, _, localData) => { var mapped = new TOut[range.Item2 - range.Item1]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k + range.Item1, array[k + range.Item1]); } localData.Add(reduce(mapped)); return(localData); }, localResult => { lock (syncLock) { intermediateResults.Add(reduce(localResult.ToArray())); } }); return(reduce(intermediateResults.ToArray())); }
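// Hedged usage sketch for the array overload above (data and lambdas are illustrative; assumes
// using System.Linq): an index-weighted sum, where select sees both the index and the element.
// Note that reduce must tolerate an empty input, because the helper calls it with an empty
// array when the source array is null or empty; Sum() returns 0 in that case.
double[] values = { 2.0, 4.0, 8.0 };
double weighted = Aggregate(values, (i, v) => i * v, parts => parts.Sum()); // 0*2 + 1*4 + 2*8 = 20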
public static void Main(string[] args) { #region Intro Console.ForegroundColor = Color.White; Logger.Inf("Select your proxies list.", true, false); var fileDialog = new OpenFileDialog { Filter = "Text Files|*.txt", Title = "Select your proxies list" }; while (fileDialog.ShowDialog() != DialogResult.OK) { } var fileName = fileDialog.FileName; var proxyList = FileHelper.ReadAsList(fileDialog.FileName); Logger.Inf($"{proxyList.Count} proxies loaded from {Path.GetFileName(fileName)}", true); Logger.Inf("Select your code list.", true, false); fileDialog.Title = "Select your code list"; while (fileDialog.ShowDialog() != DialogResult.OK) { } fileName = fileDialog.FileName; var codeList = FileHelper.ReadAsList(fileDialog.FileName); Logger.Inf($"{codeList.Count} codes loaded from {Path.GetFileName(fileName)}", true); var proxyTypes = new[] { "HTTP", "SOCKS4", "SOCKS5" }; var proxyType = string.Empty; while (string.IsNullOrEmpty(proxyType)) { Logger.Inf($"Proxy Type [{string.Join(", ", proxyTypes)}] : ", newLine: false); var input = Console.ReadLine(); if (proxyTypes.Contains(input)) { proxyType = input; } } var maxThreads = -1; while (maxThreads < 0) { Logger.Inf("Max Threads : ", newLine: false); try { var input = Console.ReadLine(); if (string.IsNullOrEmpty(input)) { continue; } maxThreads = int.Parse(input); } catch (FormatException) { // ignored } } #endregion var caller = new Netflix { ProxyList = proxyList.ToArray(), ProxyType = proxyType, }; Console.Clear(); ThreadPool.SetMinThreads(maxThreads, maxThreads); int checkedCount = 0, errorCount = 0, bannedCount = 0, validCount = 0, totalBalance = 0; FileHelper.BeginCheck(); CpmTask.Start(); Globals.Working = true; Parallel.ForEach(Partitioner.Create(codeList, EnumerablePartitionerOptions.NoBuffering), new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, code => { caller.Check(code, (codeCb, proxy, valid, error, balance) => { if (new[] { "generic_failure", "unable_to_redeem", "Unable to determine balance", "Proxy timed out" }.Contains(error)) { errorCount += 1; } if (error.Contains("Banned proxy")) { bannedCount += 1; } if (valid && !string.IsNullOrEmpty(balance)) { checkedCount += 1; validCount += 1; var rawBalance = int.Parse(new Regex("(\\d{1,3})").Match(balance).Groups[1].Value); totalBalance += rawBalance; FileHelper.Write("Hits.txt", $"{code} | €{rawBalance}"); Console.WriteLine($"{code} | {rawBalance} EUR", Color.Green); } else if (error.Contains("single_use_code")) { checkedCount += 1; } Interlocked.Increment(ref Globals.LastChecks); Console.Title = $"NGC – Checked : {checkedCount}/{codeList.Count} – Errors : {errorCount} – Bans : {bannedCount} – Hits : {validCount} – Total: {totalBalance} EUR – CPM: {CpmTask.GetCpm()}"; }); }); Logger.Inf("Job done."); Thread.Sleep(-1); }
protected override MetaMorpheusEngineResults RunSpecific() { double progress = 0; int oldPercentProgress = 0; ReportProgress(new ProgressEventArgs(oldPercentProgress, "Performing modern search... " + CurrentPartition + "/" + commonParameters.TotalPartitions, nestedIds)); byte byteScoreCutoff = (byte)commonParameters.ScoreCutoff; if (commonParameters.CalculateEValue) { byteScoreCutoff = 1; } Parallel.ForEach(Partitioner.Create(0, ListOfSortedms2Scans.Length), new ParallelOptions { MaxDegreeOfParallelism = commonParameters.MaxThreadsToUsePerFile }, (range, loopState) => { byte[] scoringTable = new byte[PeptideIndex.Count]; List <int> idsOfPeptidesPossiblyObserved = new List <int>(); for (int i = range.Item1; i < range.Item2; i++) { // Stop loop if canceled if (GlobalVariables.StopLoops) { loopState.Stop(); return; } // empty the scoring table to score the new scan (conserves memory compared to allocating a new array) Array.Clear(scoringTable, 0, scoringTable.Length); idsOfPeptidesPossiblyObserved.Clear(); var scan = ListOfSortedms2Scans[i]; // get fragment bins for this scan List <int> allBinsToSearch = GetBinsToSearch(scan); // get allowed theoretical masses from the known experimental mass // note that this is the OPPOSITE of the classic search (which calculates experimental masses from theoretical values) // this is just PRELIMINARY precursor-mass filtering // additional checks are made later to ensure that the theoretical precursor mass is acceptable var notches = MassDiffAcceptor.GetAllowedPrecursorMassIntervals(scan.PrecursorMass); double lowestMassPeptideToLookFor = Double.NegativeInfinity; double highestMassPeptideToLookFor = Double.PositiveInfinity; double largestMassDiff = notches.Max(p => p.AllowedInterval.Maximum); double smallestMassDiff = notches.Min(p => p.AllowedInterval.Minimum); if (!Double.IsInfinity(largestMassDiff)) { double largestOppositeMassDiff = -1 * (notches.Max(p => p.AllowedInterval.Maximum) - scan.PrecursorMass); lowestMassPeptideToLookFor = scan.PrecursorMass + largestOppositeMassDiff; } if (!Double.IsNegativeInfinity(smallestMassDiff)) { double smallestOppositeMassDiff = -1 * (notches.Min(p => p.AllowedInterval.Minimum) - scan.PrecursorMass); highestMassPeptideToLookFor = scan.PrecursorMass + smallestOppositeMassDiff; } // first-pass scoring IndexedScoring(allBinsToSearch, scoringTable, byteScoreCutoff, idsOfPeptidesPossiblyObserved, scan.PrecursorMass, lowestMassPeptideToLookFor, highestMassPeptideToLookFor); // done with indexed scoring; refine scores and create PSMs foreach (var id in idsOfPeptidesPossiblyObserved) { var compactPeptide = PeptideIndex[id]; var productMasses = compactPeptide.ProductMassesMightHaveDuplicatesAndNaNs(ProductTypes); Array.Sort(productMasses); double scanPrecursorMass = scan.PrecursorMass; var thisScore = CalculatePeptideScoreOld(scan.TheScan, commonParameters.ProductMassTolerance, productMasses, scanPrecursorMass, DissociationTypes, commonParameters.AddCompIons, 0); int notch = MassDiffAcceptor.Accepts(scan.PrecursorMass, compactPeptide.MonoisotopicMassIncludingFixedMods); bool meetsScoreCutoff = thisScore >= commonParameters.ScoreCutoff; bool scoreImprovement = PeptideSpectralMatches[i] == null || (thisScore - PeptideSpectralMatches[i].RunnerUpScore) > -PeptideSpectralMatch.ToleranceForScoreDifferentiation; if (meetsScoreCutoff && scoreImprovement || commonParameters.CalculateEValue) { if (PeptideSpectralMatches[i] == null) { PeptideSpectralMatches[i] = new PeptideSpectralMatch(compactPeptide, notch, thisScore, i, scan, 
commonParameters.DigestionParams); } else { PeptideSpectralMatches[i].AddOrReplace(compactPeptide, thisScore, notch, commonParameters.ReportAllAmbiguity); } if (commonParameters.CalculateEValue) { PeptideSpectralMatches[i].AllScores.Add(thisScore); } } } // report search progress progress++; var percentProgress = (int)((progress / ListOfSortedms2Scans.Length) * 100); if (percentProgress > oldPercentProgress) { oldPercentProgress = percentProgress; ReportProgress(new ProgressEventArgs(percentProgress, "Performing modern search... " + CurrentPartition + "/" + commonParameters.TotalPartitions, nestedIds)); } } }); // remove peptides below the score cutoff that were stored to calculate expectation values if (commonParameters.CalculateEValue) { for (int i = 0; i < PeptideSpectralMatches.Length; i++) { if (PeptideSpectralMatches[i] != null && PeptideSpectralMatches[i].Score < commonParameters.ScoreCutoff) { PeptideSpectralMatches[i] = null; } } } return(new MetaMorpheusEngineResults(this)); }
public async Task Generate() { try { if (string.IsNullOrEmpty(ProjectFilePath)) { Log.Exception("ProjectFilePath is empty: " + Project.ToString()); return; } ProjectDestinationFolder = GetProjectDestinationPath(Project, SolutionGenerator.SolutionDestinationFolder); if (ProjectDestinationFolder == null) { Log.Exception("Errors evaluating project: " + Project.Id); return; } Log.Write(ProjectDestinationFolder, ConsoleColor.DarkCyan); if (SolutionGenerator.SolutionSourceFolder is string solutionFolder) { ProjectSourcePath = Paths.MakeRelativeToFolder(ProjectFilePath, solutionFolder); } else { ProjectSourcePath = ProjectFilePath; } if (File.Exists(Path.Combine(ProjectDestinationFolder, Constants.DeclaredSymbolsFileName + ".txt"))) { // apparently someone already generated a project with this assembly name - their assembly wins Log.Exception(string.Format( "A project with assembly name {0} was already generated, skipping current project: {1}", this.AssemblyName, this.ProjectFilePath), isSevere: false); return; } if (Configuration.CreateFoldersOnDisk) { Directory.CreateDirectory(ProjectDestinationFolder); } var documents = Project.Documents.Where(IncludeDocument).ToList(); var generationTasks = Partitioner.Create(documents) .GetPartitions(Environment.ProcessorCount) .Select(partition => Task.Run(async() => { using (partition) { while (partition.MoveNext()) { await GenerateDocument(partition.Current); } } })); await Task.WhenAll(generationTasks); foreach (var document in documents) { OtherFiles.Add(Paths.GetRelativeFilePathInProject(document)); } if (Configuration.WriteProjectAuxiliaryFilesToDisk) { GenerateProjectFile(); GenerateDeclarations(); GenerateBaseMembers(); GenerateImplementedInterfaceMembers(); GenerateProjectInfo(); GenerateReferencesDataFiles( this.SolutionGenerator.SolutionDestinationFolder, ReferencesByTargetAssemblyAndSymbolId); GenerateSymbolIDToListOfDeclarationLocationsMap( ProjectDestinationFolder, SymbolIDToListOfLocationsMap); GenerateReferencedAssemblyList(); GenerateUsedReferencedAssemblyList(); GenerateProjectExplorer(); GenerateNamespaceExplorer(); GenerateIndex(); } var compilation = Project.GetCompilationAsync().Result; var diagnostics = compilation.GetDiagnostics().Select(d => d.ToString()).ToArray(); if (diagnostics.Length > 0) { var diagnosticsTxt = Path.Combine(this.ProjectDestinationFolder, "diagnostics.txt"); File.WriteAllLines(diagnosticsTxt, diagnostics); } } catch (Exception ex) { Log.Exception(ex, "Project generation failed for: " + ProjectSourcePath); } }
/// <summary> /// Search subdomains using wordlists /// </summary> private void SearchCommonNames() { var message = $"Searching subdomains of {strDomain} using common DNS names"; Program.LogThis(new Log(Log.ModuleType.DNSCommonNames, message, Log.LogType.debug)); Program.ChangeStatus(message); var names = new List <string>(); try { names.AddRange(File.ReadAllLines(CommonNamesFileName)); } catch { Program.LogThis(new Log(Log.ModuleType.DNSCommonNames, $"Error opening file: {CommonNamesFileName}", Log.LogType.error)); return; } var nsServerList = DNSUtil.GetNSServer(Resolve, strDomain, Resolve.DnsServers[0].Address.ToString()); foreach (var nsServer in nsServerList) { if (DNSUtil.IsDNSAnyCast(Resolve, nsServer, strDomain)) { Program.LogThis(new Log(Log.ModuleType.DNSCommonNames, $"DNS server is Anycast, not used: {nsServer}", Log.LogType.debug)); } else { var op = Partitioner.Create(names); var po = new ParallelOptions(); if (Program.cfgCurrent.ParallelDnsQueries != 0) { po.MaxDegreeOfParallelism = Program.cfgCurrent.ParallelDnsQueries; } Parallel.ForEach(op, po, delegate(string name) { if (CheckToSkip()) { return; } var subdomain = $"{name}.{strDomain}"; Program.LogThis(new Log(Log.ModuleType.DNSCommonNames, string.Format("[{0}] Trying to resolve subdomain: {1} with NameServer {0}", nsServer, subdomain), Log.LogType.debug)); foreach (var ip in DNSUtil.GetHostAddresses(Resolve, subdomain, nsServer)) { Program.LogThis(new Log(Log.ModuleType.DNSCommonNames, $"[{nsServer}] Found subdomain {subdomain}", Log.LogType.medium)); try { Program.data.AddResolution(subdomain, ip.ToString(), $"Common Names [{subdomain}]", MaxRecursion, Program.cfgCurrent, true); } catch (Exception) { } } } ); if (!bSearchWithAllDNS) { break; } } } }
public static void TestForEach_Break(int loopsize, int breakpoint) { var complete = new bool[loopsize]; // NOTE: Make sure to use some collection that is NOT a list or an // array. Lists/arrays will essentially be passed through // Parallel.For() logic, which would make this test fail. var iqueue = new Queue <int>(); for (int i = 0; i < loopsize; i++) { iqueue.Enqueue(i); } Parallel.ForEach(iqueue, delegate(int i, ParallelLoopState ps) { complete[i] = true; if (i >= breakpoint) { ps.Break(); } //Thread.Sleep(2); }); // Same rules as the For loop: there should be no omissions prior // to the break, and there should be some after. for (int i = 0; i <= breakpoint; i++) { Assert.True(complete[i], string.Format("TestForEachBreak(loopsize={0},breakpoint={1}): Failed: incomplete at {2}", loopsize, breakpoint, i)); } bool result = false; for (int i = breakpoint + 1; i < loopsize; i++) { if (!complete[i]) { result = true; break; } } Assert.True(result, string.Format("TestForEachBreak(loopsize={0},breakpoint={1}): Failed: Could not detect any interruption of For-loop.", loopsize, breakpoint)); // // Now try it for OrderablePartitioner // var ilist = new List <int>(); for (int i = 0; i < loopsize; i++) { ilist.Add(i); complete[i] = false; } OrderablePartitioner <int> mop = Partitioner.Create(ilist, true); Parallel.ForEach(mop, delegate(int item, ParallelLoopState ps, long index) { //Break does not imply that the other iterations will not be run //https://docs.microsoft.com/en-us/dotnet/api/system.threading.tasks.parallelloopstate.break#System_Threading_Tasks_ParallelLoopState_Break //execute the test with a high loop size and a low break index complete[index] = true; if (index >= breakpoint) { ps.Break(); } //Thread.Sleep(2); }); for (int i = 0; i <= breakpoint; i++) { Assert.True(complete[i], string.Format("TestForEachBreak(loopsize={0},breakpoint={1}): Failed: incomplete at {2}", loopsize, breakpoint, i)); } result = false; for (int i = breakpoint + 1; i < loopsize; i++) { if (!complete[i]) { result = true; break; } } Assert.True(result, string.Format("TestForEachBreak(loopsize={0},breakpoint={1}): Failed: Could not detect any interruption of For-loop.", loopsize, breakpoint)); }
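// Hedged companion sketch: ParallelLoopState.Stop, unlike Break, makes no guarantee about
// iterations below the stopping index, so the "no omissions prior to the breakpoint" assertions
// above would not hold for it. Minimal illustration over the same kind of orderable partitioner
// (sizes illustrative; assumes using System.Linq):
OrderablePartitioner<int> stopPartitioner = Partitioner.Create(Enumerable.Range(0, 1000).ToList(), true);
ParallelLoopResult stopResult = Parallel.ForEach(stopPartitioner, (item, ps, index) => { if (index >= 100) { ps.Stop(); } });
// Once a Stop() has executed, stopResult.IsCompleted is false and stopResult.LowestBreakIteration is null.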
static int Main(string[] args) { var softwareUnderTest = args[0]; _logFile = string.Format("{0}-{1}.log", softwareUnderTest, DateTime.Now.ToString("yyyy-MM-dd HHmmss")); if (!File.Exists(softwareUnderTest)) { LogText(string.Format("The file {0} does not exist.", softwareUnderTest)); return(-1); } LogText("Loading test suite..."); // Load static container of all tests. List <TestEnvironment> listEnvironments = new List <TestEnvironment>(); TestEnvironments.AddAll(listEnvironments); int testIndex = 0; var f = TaskScheduler.Default; var options = new ParallelOptions() { MaxDegreeOfParallelism = 4, }; // We can run tests on XP and Vista/2003/2008 at the same time since they are separate VMware images. var environmentsGroupedByVmwareImage = listEnvironments.GroupBy(item => item.VMwarePath).ToList(); var partitioner = Partitioner.Create(environmentsGroupedByVmwareImage, EnumerablePartitionerOptions.NoBuffering); Parallel.ForEach(partitioner, options, environmentGroup => { foreach (var environment in environmentGroup) { int localIndex; lock (_lockCounterTest) { localIndex = ++testIndex; string message = string.Format("{0}: {1}/{2} - Test: {3} on {4} with db {5}. Image: {6} (Snapshot: {7})", DateTime.Now, localIndex, listEnvironments.Count, environment.Description, environment.OperatingSystem, environment.DatabaseType, Path.GetFileName(environment.VMwarePath), environment.SnapshotName); LogText(message); } var runner = new TestRunner(true, environment, false, softwareUnderTest); try { runner.Run(); LogText(string.Format("{0}: Test {1} completed successfully.", DateTime.Now, localIndex)); } catch (Exception ex) { LogText(string.Format("{0}: Test {1} failed.", DateTime.Now, localIndex)); LogText(ex.ToString()); throw; } } }); System.Console.WriteLine("All tests completed successfully."); if (System.Diagnostics.Debugger.IsAttached) { System.Console.WriteLine("Press Enter to exit."); System.Console.ReadLine(); } return(0); }
/// <summary> /// Sets the color of all transparent pixels based on the non-transparent color values next to them. /// This does not affect any alpha values but prepares the Layer for correct filtering once uploaded /// to <see cref="Duality.Resources.Texture"/>. /// </summary> public void ColorTransparentPixels() { ColorRgba[] dataCopy = new ColorRgba[this.data.Length]; Array.Copy(this.data, dataCopy, this.data.Length); #if !DISABLE_ASYNC Parallel.ForEach(Partitioner.Create(0, this.data.Length), range => { #endif Point2 pos = new Point2(); int[] nPos = new int[8]; bool[] nOk = new bool[8]; int[] mixClr = new int[4]; #if !DISABLE_ASYNC for (int i = range.Item1; i < range.Item2; i++) { #else for (int i = 0; i < this.data.Length; i++) { #endif if (dataCopy[i].A != 0) continue; pos.Y = i / this.width; pos.X = i - (pos.Y * this.width); mixClr[0] = 0; mixClr[1] = 0; mixClr[2] = 0; mixClr[3] = 0; nPos[0] = i - this.width; nPos[1] = i + this.width; nPos[2] = i - 1; nPos[3] = i + 1; nPos[4] = i - this.width - 1; nPos[5] = i + this.width - 1; nPos[6] = i - this.width + 1; nPos[7] = i + this.width + 1; nOk[0] = pos.Y > 0; nOk[1] = pos.Y < this.height - 1; nOk[2] = pos.X > 0; nOk[3] = pos.X < this.width - 1; nOk[4] = nOk[2] && nOk[0]; nOk[5] = nOk[2] && nOk[1]; nOk[6] = nOk[3] && nOk[0]; nOk[7] = nOk[3] && nOk[1]; int nMult = 2; for (int j = 0; j < 8; j++) { if (!nOk[j]) continue; if (dataCopy[nPos[j]].A == 0) continue; mixClr[0] += dataCopy[nPos[j]].R * nMult; mixClr[1] += dataCopy[nPos[j]].G * nMult; mixClr[2] += dataCopy[nPos[j]].B * nMult; mixClr[3] += nMult; if (j > 3) nMult = 1; } if (mixClr[3] > 0) { this.data[i].R = (byte)Math.Round((float)mixClr[0] / (float)mixClr[3]); this.data[i].G = (byte)Math.Round((float)mixClr[1] / (float)mixClr[3]); this.data[i].B = (byte)Math.Round((float)mixClr[2] / (float)mixClr[3]); } } #if !DISABLE_ASYNC }); #endif }
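// Hedged sketch (not part of the Layer class above): the #if !DISABLE_ASYNC pattern used there,
// reduced to its skeleton, so the parallel range loop and the serial fallback share a single body
// and only the driver differs. Names and the per-element work are illustrative.
static void FillBuffer(int[] buffer)
{
    if (buffer.Length == 0) { return; } // Partitioner.Create(0, 0) would throw
#if !DISABLE_ASYNC
    Parallel.ForEach(Partitioner.Create(0, buffer.Length), range => Fill(buffer, range.Item1, range.Item2));
#else
    Fill(buffer, 0, buffer.Length);
#endif
}
static void Fill(int[] buffer, int start, int stop)
{
    // placeholder per-element work; the real code would do the pixel math here
    for (int i = start; i < stop; i++) { buffer[i] = i; }
}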
/// <summary> /// Selects an item (such as Max or Min). /// </summary> /// <param name="array">The array to iterate over.</param> /// <param name="select">The function to select items over a subset.</param> /// <param name="reduce">The function to select the item of selection from the subsets.</param> /// <returns>The selected value.</returns> public static TOut Aggregate <T, TOut>(T[] array, Func <int, T, TOut> select, Func <TOut[], TOut> reduce) { if (select == null) { throw new ArgumentNullException("select"); } if (reduce == null) { throw new ArgumentNullException("reduce"); } // Special case: no action if (array == null || array.Length == 0) { return(reduce(new TOut[0])); } // Special case: single action, inline if (array.Length == 1) { return(reduce(new[] { select(0, array[0]) })); } // Special case: straight execution without parallelism if (Control.DisableParallelization || Control.NumberOfParallelWorkerThreads < 2) { var mapped = new TOut[array.Length]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k, array[k]); } return(reduce(mapped)); } #if (PORTABLE || NET35) var tasks = new Task <TOut> [Control.NumberOfParallelWorkerThreads]; var size = array.Length / tasks.Length; // partition the jobs into separate sets for each but the last worked thread for (var i = 0; i < tasks.Length - 1; i++) { var start = (i * size); var stop = ((i + 1) * size); tasks[i] = Task.Factory.StartNew(() => { var mapped = new TOut[stop - start]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k + start, array[k + start]); } return(reduce(mapped)); }); } // add another set for last worker thread tasks[tasks.Length - 1] = Task.Factory.StartNew(() => { var start = ((tasks.Length - 1) * size); var mapped = new TOut[array.Length - start]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k + start, array[k + start]); } return(reduce(mapped)); }); return(Task.Factory .ContinueWhenAll(tasks, tsk => reduce(tsk.Select(t => t.Result).ToArray())) .Result); #else var intermediateResults = new List <TOut>(); var syncLock = new object(); var maxThreads = Control.DisableParallelization ? 1 : Control.NumberOfParallelWorkerThreads; Parallel.ForEach( Partitioner.Create(0, array.Length), new ParallelOptions { MaxDegreeOfParallelism = maxThreads }, () => new List <TOut>(), (range, loop, localData) => { var mapped = new TOut[range.Item2 - range.Item1]; for (int k = 0; k < mapped.Length; k++) { mapped[k] = select(k + range.Item1, array[k + range.Item1]); } localData.Add(reduce(mapped)); return(localData); }, localResult => { lock (syncLock) { intermediateResults.Add(reduce(localResult.ToArray())); } }); return(reduce(intermediateResults.ToArray())); #endif }
private ColorRgba[] InternalRescale(int w, int h, ImageScaleFilter filter)
{
    if (this.width == w && this.height == h) return null;

    ColorRgba[] tempDestData = new ColorRgba[w * h];
    if (filter == ImageScaleFilter.Nearest)
    {
        // Don't use Parallel.For here, the overhead is too big and the compiler
        // does a great job optimizing this piece of code without it, so don't get in the way.
        for (int i = 0; i < tempDestData.Length; i++)
        {
            int y = i / w;
            int x = i - (y * w);

            int xTmp = (x * this.width) / w;
            int yTmp = (y * this.height) / h;
            int nTmp = xTmp + (yTmp * this.width);
            tempDestData[i] = this.data[nTmp];
        }
    }
    else if (filter == ImageScaleFilter.Linear)
    {
#if !DISABLE_ASYNC
        Parallel.ForEach(Partitioner.Create(0, tempDestData.Length), range =>
        {
            for (int i = range.Item1; i < range.Item2; i++)
#else
            for (int i = 0; i < tempDestData.Length; i++)
#endif
            {
                int y = i / w;
                int x = i - (y * w);

                float xRatio = ((float)(x * this.width) / (float)w) + 0.5f;
                float yRatio = ((float)(y * this.height) / (float)h) + 0.5f;
                int xTmp = (int)xRatio;
                int yTmp = (int)yRatio;
                xRatio -= xTmp;
                yRatio -= yTmp;

                int xTmp2 = xTmp + 1;
                int yTmp2 = yTmp + 1;
                xTmp = xTmp < this.width ? xTmp : this.width - 1;
                yTmp = (yTmp < this.height ? yTmp : this.height - 1) * this.width;
                xTmp2 = xTmp2 < this.width ? xTmp2 : this.width - 1;
                yTmp2 = (yTmp2 < this.height ? yTmp2 : this.height - 1) * this.width;

                int nTmp0 = xTmp + yTmp;
                int nTmp1 = xTmp2 + yTmp;
                int nTmp2 = xTmp + yTmp2;
                int nTmp3 = xTmp2 + yTmp2;

                tempDestData[i].R = (byte)
                    (
                        ((float)this.data[nTmp0].R * (1.0f - xRatio) * (1.0f - yRatio)) +
                        ((float)this.data[nTmp1].R * xRatio * (1.0f - yRatio)) +
                        ((float)this.data[nTmp2].R * yRatio * (1.0f - xRatio)) +
                        ((float)this.data[nTmp3].R * xRatio * yRatio)
                    );
                tempDestData[i].G = (byte)
                    (
                        ((float)this.data[nTmp0].G * (1.0f - xRatio) * (1.0f - yRatio)) +
                        ((float)this.data[nTmp1].G * xRatio * (1.0f - yRatio)) +
                        ((float)this.data[nTmp2].G * yRatio * (1.0f - xRatio)) +
                        ((float)this.data[nTmp3].G * xRatio * yRatio)
                    );
                tempDestData[i].B = (byte)
                    (
                        ((float)this.data[nTmp0].B * (1.0f - xRatio) * (1.0f - yRatio)) +
                        ((float)this.data[nTmp1].B * xRatio * (1.0f - yRatio)) +
                        ((float)this.data[nTmp2].B * yRatio * (1.0f - xRatio)) +
                        ((float)this.data[nTmp3].B * xRatio * yRatio)
                    );
                tempDestData[i].A = (byte)
                    (
                        ((float)this.data[nTmp0].A * (1.0f - xRatio) * (1.0f - yRatio)) +
                        ((float)this.data[nTmp1].A * xRatio * (1.0f - yRatio)) +
                        ((float)this.data[nTmp2].A * yRatio * (1.0f - xRatio)) +
                        ((float)this.data[nTmp3].A * xRatio * yRatio)
                    );
            }
#if !DISABLE_ASYNC
        });
#endif
    }

    return tempDestData;
}
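The Linear branch is plain bilinear filtering: each destination pixel blends its four nearest source texels, weighted by the fractional distance to each. A compact sketch of that weighting on a single grayscale channel; the function name and the 2x2 test image are illustrative:

using System;

static class BilinearDemo
{
    static float SampleBilinear(float[] img, int w, int h, float x, float y)
    {
        int x0 = Math.Min((int)x, w - 1), y0 = Math.Min((int)y, h - 1);
        int x1 = Math.Min(x0 + 1, w - 1), y1 = Math.Min(y0 + 1, h - 1);
        float fx = x - x0, fy = y - y0;

        // The weights of the four corners always sum to 1.
        return img[x0 + y0 * w] * (1 - fx) * (1 - fy)
             + img[x1 + y0 * w] * fx * (1 - fy)
             + img[x0 + y1 * w] * (1 - fx) * fy
             + img[x1 + y1 * w] * fx * fy;
    }

    static void Main()
    {
        float[] img = { 0f, 1f, 1f, 0f }; // 2x2 checker
        // Dead center blends all four texels equally: (0 + 1 + 1 + 0) / 4 = 0.5
        Console.WriteLine(SampleBilinear(img, 2, 2, 0.5f, 0.5f));
    }
}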
/// <summary>
/// Executes a for loop in which iterations may run in parallel.
/// </summary>
/// <param name="fromInclusive">The start index, inclusive.</param>
/// <param name="toExclusive">The end index, exclusive.</param>
/// <param name="rangeSize">The partition size for splitting work into smaller pieces.</param>
/// <param name="body">The body to be invoked for each iteration range.</param>
public static void For(int fromInclusive, int toExclusive, int rangeSize, Action<int, int> body)
{
    if (body == null)
    {
        throw new ArgumentNullException("body");
    }

    if (fromInclusive < 0)
    {
        throw new ArgumentOutOfRangeException("fromInclusive");
    }

    if (fromInclusive > toExclusive)
    {
        throw new ArgumentOutOfRangeException("toExclusive");
    }

    if (rangeSize < 1)
    {
        throw new ArgumentOutOfRangeException("rangeSize");
    }

    var length = toExclusive - fromInclusive;

    // Special case: nothing to do
    if (length <= 0)
    {
        return;
    }

    var maxDegreeOfParallelism = Control.NumberOfParallelWorkerThreads;

    // Special case: not worth parallelizing, inline
    if (Control.DisableParallelization || maxDegreeOfParallelism < 2 || (rangeSize * 2) > length)
    {
        body(fromInclusive, toExclusive);
        return;
    }

#if (PORTABLE || NET35)
    var tasks = new Task[Math.Min(maxDegreeOfParallelism, length / rangeSize)];
    rangeSize = (toExclusive - fromInclusive) / tasks.Length;

    // partition the jobs into separate sets for each but the last worker thread
    for (var i = 0; i < tasks.Length - 1; i++)
    {
        var start = fromInclusive + (i * rangeSize);
        var stop = fromInclusive + ((i + 1) * rangeSize);

        tasks[i] = Task.Factory.StartNew(() => body(start, stop));
    }

    // add another set for the last worker thread
    tasks[tasks.Length - 1] = Task.Factory.StartNew(() => body(fromInclusive + ((tasks.Length - 1) * rangeSize), toExclusive));

    Task.WaitAll(tasks);
#else
    Parallel.ForEach(
        Partitioner.Create(fromInclusive, toExclusive, rangeSize),
        new ParallelOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism },
        (range, loopState) => body(range.Item1, range.Item2));
#endif
}
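The point of a range-based For helper like this is that the body receives contiguous [from, to) chunks rather than single indices, so any per-chunk setup (buffers, locals, RNGs) is paid once per range instead of once per element. A hedged standalone equivalent without the Control/framework switches, with illustrative names:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

static class ChunkedForDemo
{
    static void For(int fromInclusive, int toExclusive, int rangeSize, Action<int, int> body)
    {
        Parallel.ForEach(
            Partitioner.Create(fromInclusive, toExclusive, rangeSize),
            range => body(range.Item1, range.Item2));
    }

    static void Main()
    {
        double[] data = new double[100_000];

        For(0, data.Length, 4096, (start, stop) =>
        {
            // One reusable local per chunk, not per element.
            var rng = new Random(start);
            for (int i = start; i < stop; i++)
            {
                data[i] = rng.NextDouble();
            }
        });

        Console.WriteLine(data[0]);
    }
}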
/// <summary>
/// Converts the 16-bit integer sample buffers to floats, runs them through
/// each VST plugin in the chain and writes the processed samples back.
/// </summary>
public void ProcessCallback(int[][] buf, int samples)
{
    if (samples == 0)
    {
        return;
    }

    using (VstAudioBufferManager bufA = new VstAudioBufferManager(2, samples))
    using (VstAudioBufferManager bufB = new VstAudioBufferManager(2, samples))
    {
        lock (InstrumentBase.VstPluginContextLockObject)
        {
            bool processed = false;

            // Fill the first buffer, scaling 16-bit integer samples to [-1, +1].
            foreach (var vp in this)
            {
                var ctx = vp.PluginContext;
                if (ctx != null)
                {
                    int idx = 0;
                    foreach (VstAudioBuffer vab in bufA)
                    {
                        Parallel.ForEach(Partitioner.Create(0, samples), range =>
                        {
                            for (var i = range.Item1; i < range.Item2; i++)
                            {
                                vab[i] = (float)buf[idx][i] / 32767.0f;
                            }
                        });
                        idx++;
                    }
                    break;
                }
            }

            // Ping-pong between the two buffer sets: each plugin reads from
            // 'bufa' and writes into 'bufb'.
            VstAudioBufferManager bufa = bufA;
            VstAudioBufferManager bufb = bufB;
            foreach (var vp in this)
            {
                var ctx = vp.PluginContext;
                if (ctx != null)
                {
                    ctx.Context.PluginCommandStub.SetBlockSize(samples);
                    ctx.Context.PluginCommandStub.ProcessReplacing(bufa.ToArray<VstAudioBuffer>(), bufb.ToArray<VstAudioBuffer>());
                    processed = true;

                    // The buffer just written becomes the next plugin's input.
                    var tmp = bufa;
                    bufa = bufb;
                    bufb = tmp;
                }
            }

            if (processed)
            {
                // After the final swap, 'bufa' holds the last written buffer.
                int idx = 0;
                foreach (VstAudioBuffer vab in bufa)
                {
                    Parallel.ForEach(Partitioner.Create(0, samples), range =>
                    {
                        for (var i = range.Item1; i < range.Item2; i++)
                        {
                            buf[idx][i] = (int)(vab[i] * 32767.0f);
                        }
                    });
                    idx++;
                }
            }
        }
    }
}
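The fill and drain loops above are just a chunked, parallel sample-format conversion. A standalone sketch of that piece alone; the 32767 scale assumes 16-bit PCM and all names are illustrative:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

static class SampleConvertDemo
{
    static void Main()
    {
        int[] pcm = new int[48000];
        for (int i = 0; i < pcm.Length; i++)
            pcm[i] = (int)(Math.Sin(2 * Math.PI * 440 * i / 48000.0) * 32767);

        float[] f = new float[pcm.Length];

        // int -> float, in contiguous chunks
        Parallel.ForEach(Partitioner.Create(0, pcm.Length), range =>
        {
            for (int i = range.Item1; i < range.Item2; i++)
                f[i] = pcm[i] / 32767f;
        });

        // float -> int, same pattern in reverse
        Parallel.ForEach(Partitioner.Create(0, pcm.Length), range =>
        {
            for (int i = range.Item1; i < range.Item2; i++)
                pcm[i] = (int)(f[i] * 32767f);
        });

        Console.WriteLine(f[120]);
    }
}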
public void ProcessPhoneCalls()
{
    var phoneCallsFunc = new PhoneCallsImpl();
    DataTable toBeInsertedDataTable;
    OleDbDataReader dataReader = null;
    var lastImportedPhoneCallDate = DateTime.MinValue;

    //OPEN CONNECTIONS
    _sourceDbConnector.Open();
    _destinationDbConnector.Open();

    dataReader = DbRoutines.Executereader(SqLs.GetLastImportedPhonecallDate(_phoneCallsTableName, false), _destinationDbConnector);
    if (dataReader.Read() && !dataReader.IsDBNull(0))
    {
        lastImportedPhoneCallDate = dataReader.GetDateTime(dataReader.GetOrdinal("SessionIdTime"));
        lastImportedPhoneCallDate = lastImportedPhoneCallDate.AddDays(+1);
        dataReader.CloseDataReader();
    }
    else
    {
        //Table is empty; in this case we need to read from the source that we will import the data from
        dataReader.CloseDataReader();
        dataReader = DbRoutines.Executereader(SqLs.GetLastImportedPhonecallDate("DialogsView", true), _sourceDbConnector);
        if (dataReader.Read() && !dataReader.IsDBNull(0))
        {
            lastImportedPhoneCallDate = dataReader.GetDateTime(dataReader.GetOrdinal("SessionIdTime"));
        }
        dataReader.CloseDataReader();
    }

    while (lastImportedPhoneCallDate <= DateTime.Now)
    {
        //Construct CREATE_IMPORT_PHONE_CALLS_QUERY
        var sql = SqLs.CreateImportCallsQueryLync2013(lastImportedPhoneCallDate);

        if (lastImportedPhoneCallDate > DateTime.MinValue)
        {
            Console.WriteLine("Importing PhoneCalls from " + _phoneCallsTableName + " since " + lastImportedPhoneCallDate);
        }
        else
        {
            Console.WriteLine("Importing PhoneCalls from " + _phoneCallsTableName + " since the beginning");
        }

        //Read DB and map it to a List of PhoneCalls
        var phoneCalls = Db.ReadSqlData(DbRoutines.Executereader(sql, _sourceDbConnector), Db.PhoneCallsSelector).ToList();

        if (phoneCalls.Count > 0)
        {
            var partitioner = Partitioner.Create(0, phoneCalls.Count);

            Parallel.ForEach(partitioner, (range, loopState) =>
            {
                for (var i = range.Item1; i < range.Item2; i++)
                {
                    phoneCallsFunc.ProcessPhoneCall(phoneCalls[i]);
                }
            });

            // Bulk insert
            toBeInsertedDataTable = phoneCalls.ConvertToDataTable();
            toBeInsertedDataTable.BulkInsert(_phoneCallsTableName, _destinationDbConnector.ConnectionString);
            toBeInsertedDataTable.Dispose();

            Console.WriteLine(" [+] Imported: " + phoneCalls.Count + " phone calls.");
        }

        // Increment the datetime object by 1 day.
        lastImportedPhoneCallDate = lastImportedPhoneCallDate.AddDays(+1);
        GC.Collect();
    }

    //Close all connections and DataReaders
    _sourceDbConnector.Close();
    _destinationDbConnector.Close();
    if (dataReader.IsClosed == false)
    {
        dataReader.Close();
    }

    Console.WriteLine("Finished importing Calls from " + _phoneCallsTableName);
}
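The core batch pattern here is: load a day's records into a list, process them in parallel over disjoint index ranges, then bulk-persist the whole batch. A small sketch of that pattern in isolation; the record type and the per-record work are illustrative stand-ins:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading.Tasks;

static class BatchProcessDemo
{
    sealed class CallRecord { public int DurationSeconds; public decimal Cost; }

    static void Main()
    {
        var calls = new List<CallRecord>();
        for (int i = 0; i < 10_000; i++)
            calls.Add(new CallRecord { DurationSeconds = i % 600 });

        // Index ranges avoid per-element delegate overhead; each worker owns
        // a disjoint slice of the list, so no synchronization is required.
        Parallel.ForEach(Partitioner.Create(0, calls.Count), range =>
        {
            for (int i = range.Item1; i < range.Item2; i++)
            {
                calls[i].Cost = calls[i].DurationSeconds * 0.002m;
            }
        });

        Console.WriteLine(calls[599].Cost);
    }
}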
public static IEnumerable<object[]> AggregateExceptionData(int[] counts)
{
    foreach (object[] results in UnorderedSources.Ranges(counts.Cast<int>()))
    {
        Labeled<ParallelQuery<int>> query = (Labeled<ParallelQuery<int>>)results[0];
        if (query.ToString().StartsWith("Partitioner"))
        {
            yield return new object[] { Labeled.Label(query.ToString(), Partitioner.Create(UnorderedSources.GetRangeArray(0, (int)results[1]), false).AsParallel()), results[1] };
        }
        else if (query.ToString().StartsWith("Enumerable.Range"))
        {
            yield return new object[] { Labeled.Label(query.ToString(), new StrictPartitioner<int>(Partitioner.Create(Enumerable.Range(0, (int)results[1]), EnumerablePartitionerOptions.None), (int)results[1]).AsParallel()), results[1] };
        }
        else
        {
            yield return results;
        }
    }
}
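This test helper exercises two Partitioner.Create overloads as PLINQ sources: the array overload, whose bool argument toggles load balancing, and the enumerable overload configured through EnumerablePartitionerOptions. A minimal sketch of both feeding AsParallel, with illustrative names:

using System;
using System.Collections.Concurrent;
using System.Linq;

static class PartitionerPlinqDemo
{
    static void Main()
    {
        int[] data = Enumerable.Range(0, 1000).ToArray();

        // Static partitioning of an array (loadBalance: false) pins fixed
        // chunks to each worker up front.
        long sum1 = Partitioner.Create(data, false).AsParallel().Sum(x => (long)x);

        // Chunked, load-balanced partitioning of a lazy sequence.
        long sum2 = Partitioner.Create(Enumerable.Range(0, 1000), EnumerablePartitionerOptions.NoBuffering)
            .AsParallel().Sum(x => (long)x);

        Console.WriteLine(sum1 == sum2); // True: 499500 both ways
    }
}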
private void SeparateRenderables(RenderContext context, bool invalidateSceneGraph, bool invalidatePerFrameRenderables)
{
    Clear(invalidateSceneGraph, invalidatePerFrameRenderables);
    if (invalidateSceneGraph)
    {
        viewportRenderables.AddRange(Viewport.Renderables);
        renderer.UpdateSceneGraph(RenderContext, viewportRenderables, perFrameFlattenedScene);
#if DEBUG
        Debug.WriteLine("Flatten Scene Graph");
#endif
    }

    int sceneCount = perFrameFlattenedScene.Count;
    if (invalidatePerFrameRenderables)
    {
#if DEBUG
        Debug.WriteLine("Get PerFrameRenderables");
#endif
        bool isInScreenSpacedGroup = false;
        int screenSpacedGroupDepth = int.MaxValue;
        for (int i = 0; i < sceneCount;)
        {
            var renderable = perFrameFlattenedScene[i];
            renderable.Value.Update(context);
            var type = renderable.Value.RenderType;
            int depth = renderable.Key;
            if (!renderable.Value.IsRenderable)
            {
                //Skip scene graph depth larger than current node
                ++i;
                for (; i < sceneCount; ++i)
                {
                    if (perFrameFlattenedScene[i].Key <= depth)
                    {
                        break;
                    }

                    i += perFrameFlattenedScene[i].Value.ItemsInternal.Count;
                }
                continue;
            }

            if (renderable.Value.RenderCore.NeedUpdate) // Run update function at the beginning of actual rendering.
            {
                needUpdateCores.Add(renderable.Value.RenderCore);
            }
            ++i;

            // Add node into screen spaced array if the node belongs to a screen spaced group.
            if (isInScreenSpacedGroup && depth > screenSpacedGroupDepth)
            {
                screenSpacedNodes.Add(renderable.Value);
                continue;
            }

            isInScreenSpacedGroup = false;
            screenSpacedGroupDepth = int.MaxValue;
            switch (type)
            {
                case RenderType.Opaque:
                    opaqueNodes.Add(renderable.Value);
                    break;
                case RenderType.Light:
                    lightNodes.Add(renderable.Value);
                    break;
                case RenderType.Transparent:
                    transparentNodes.Add(renderable.Value);
                    break;
                case RenderType.Particle:
                    particleNodes.Add(renderable.Value);
                    break;
                case RenderType.PreProc:
                    preProcNodes.Add(renderable.Value);
                    break;
                case RenderType.PostProc:
                    postProcNodes.Add(renderable.Value);
                    break;
                case RenderType.ScreenSpaced:
                    screenSpacedNodes.Add(renderable.Value);
                    isInScreenSpacedGroup = true;
                    screenSpacedGroupDepth = renderable.Key;
                    break;
            }
        }

        if (RenderConfiguration.EnableRenderOrder)
        {
            for (int i = 0; i < preProcNodes.Count; ++i)
            {
                preProcNodes[i].UpdateRenderOrderKey();
            }
            preProcNodes.Sort();

            for (int i = 0; i < opaqueNodes.Count; ++i)
            {
                opaqueNodes[i].UpdateRenderOrderKey();
            }
            opaqueNodes.Sort();

            for (int i = 0; i < postProcNodes.Count; ++i)
            {
                postProcNodes[i].UpdateRenderOrderKey();
            }
            postProcNodes.Sort();

            for (int i = 0; i < particleNodes.Count; ++i)
            {
                particleNodes[i].UpdateRenderOrderKey();
            }
            particleNodes.Sort();
        }

        opaquePartitioner = opaqueNodes.Count > 0 ? Partitioner.Create(0, opaqueNodes.Count, FrustumPartitionSize) : null;
        transparentPartitioner = transparentNodes.Count > 0 ? Partitioner.Create(0, transparentNodes.Count, FrustumPartitionSize) : null;
        SetupFrustumTestFunctions();
    }
    else
    {
        for (int i = 0; i < sceneCount;)
        {
            var renderable = perFrameFlattenedScene[i];
            renderable.Value.Update(context);
            if (!renderable.Value.IsRenderable)
            {
                //Skip scene graph depth larger than current node
                int depth = renderable.Key;
                ++i;
                for (; i < sceneCount; ++i)
                {
                    if (perFrameFlattenedScene[i].Key <= depth)
                    {
                        break;
                    }

                    i += perFrameFlattenedScene[i].Value.ItemsInternal.Count;
                }
                continue;
            }

            if (renderable.Value.RenderCore.NeedUpdate) // Run update function at the beginning of actual rendering.
            {
                needUpdateCores.Add(renderable.Value.RenderCore);
            }
            ++i;
        }
    }
}
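Note the tail of the invalidation branch: the fixed-chunk range partitioners over the opaque and transparent node lists are built once here and cached for later parallel frustum tests, rather than being recreated on every pass. A hedged sketch of that caching pattern; the centers array, the visibility test, and the constant value are illustrative stand-ins:

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

static class FrustumCullDemo
{
    const int FrustumPartitionSize = 512; // chunk size tuned to amortize per-chunk overhead

    static void Main()
    {
        var centers = new float[10_000];
        var rng = new Random(7);
        for (int i = 0; i < centers.Length; i++) centers[i] = (float)rng.NextDouble() * 200f;

        // Created once, reused by every culling pass over the same list.
        OrderablePartitioner<Tuple<int, int>> partitioner =
            centers.Length > 0 ? Partitioner.Create(0, centers.Length, FrustumPartitionSize) : null;

        bool[] visible = new bool[centers.Length];
        if (partitioner != null)
        {
            Parallel.ForEach(partitioner, range =>
            {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    visible[i] = centers[i] < 100f; // stand-in for a real frustum test
                }
            });
        }

        Console.WriteLine(Array.FindAll(visible, v => v).Length);
    }
}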