/// <summary>
/// Runs FOF-group processing: a single snapshot when <c>snapnumber</c> >= 0,
/// otherwise every snapshot in [firstSnap, lastSnap]. The isProcessing flag
/// is checked before each snapshot so a cancellation is logged, not processed.
/// </summary>
public override void Run()
{
    if (snapnumber >= 0)
    {
        if (isProcessing)
        {
            ProcessIndraFofs(snapnumber);
        }
        else
        {
            DebugOut.PrintLine("Processing SimDB FOF groups interrupted at snapshot " + snapnumber);
        }
    }
    else
    {
        for (Int16 i = firstSnap; i <= lastSnap; i++)
        {
            if (isProcessing)
            {
                ProcessIndraFofs(i);
            }
            else
            {
                // BUG FIX: previously logged the snapnumber field (negative in
                // this branch) instead of the snapshot the loop stopped at.
                DebugOut.PrintLine("Processing SimDB FOF groups interrupted at snapshot " + i);
            }
        }
    }
}
/// <summary>
/// Runs FFT-data processing: a single snapshot when <c>snapnumber</c> >= 0,
/// otherwise every snapshot in [firstSnap, lastSnap]. The isProcessing flag
/// is checked before each snapshot so a cancellation is logged, not processed.
/// </summary>
public override void Run()
{
    // BUG FIX: removed a leftover debug statement that deleted a hard-coded
    // per-user path ("C:\Users\crankshaw\Documents\fft_info.txt"); on any
    // other machine it was at best a no-op and at worst an access error.
    if (snapnumber >= 0)
    {
        if (isProcessing)
        {
            ProcessIndraFFTData(snapnumber);
        }
        else
        {
            DebugOut.PrintLine("Processing SimDB FFT Data interrupted at snapshot " + snapnumber);
        }
    }
    else
    {
        for (Int16 i = firstSnap; i <= lastSnap; i++)
        {
            if (isProcessing)
            {
                ProcessIndraFFTData(i);
            }
            else
            {
                DebugOut.PrintLine("Processing SimDB FFT Data interrupted at snapshot " + i);
            }
        }
    }
}
/// <summary>
/// Runs every queued Process in order, stopping early if isProcessing is
/// cleared by another thread. Saves accumulated SQL bulk-insert commands
/// (when a target file is configured) and always clears isProcessing on exit.
/// </summary>
public void Run()
{
    try
    {
        isProcessing = true;
        foreach (Process process in processes)
        {
            if (!isProcessing)
            {
                break; // cancelled externally; stop before the next stage
            }
            process.Run();
        }
        DebugOut.PrintLine("Processing complete.");
        if (SQLCommandsFile != null)
        {
            // BUG FIX: the "saved to" message used to print even when
            // SQLCommandsFile was null and nothing was saved.
            DebugOut.PrintLine("SQL bulk insert commands saved to " + SQLCommandsFile);
            DebugOut.SaveCommands(SQLCommandsFile);
        }
    }
    finally
    {
        // BUG FIX: the original caught Exception and used "throw e;", which
        // resets the stack trace. A finally block both preserves the trace
        // and removes the duplicated isProcessing reset.
        isProcessing = false;
    }
}
/// <summary>
/// Background-worker completion handler: prints the run's end status, then
/// (when any review scores were collected) an "Audience Score" summary,
/// and finally re-arms the GO button.
/// </summary>
private void backgroundWorker1_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    // Final status line for the run (printed whether or not it was cancelled).
    DebugOut.AppendText("\r\n\r\n" + endStatus + "\r\n");

    if (reviewScores.Count > 0)
    {
        averageScore = reviewScores.Average();
        pLiked = PercentLiked();

        DebugOut.AppendText("\r\nAudience Score");

        // A full popcorn bucket means 3.5 stars or higher from users
        // (3.5 stars = 70%).
        string bucket = (averageScore >= 70) ? "Full" : "Spilled";
        DebugOut.AppendText("\r\n\tPopcorn: " + bucket);

        DebugOut.AppendText("\r\n\t" + pLiked.ToString("0.00") + "% liked it");
        DebugOut.AppendText("\r\n\tAverage Rating: " + (averageScore / 20).ToString("0.0") + "/5 (" + averageScore.ToString("0.00") + "%)");
        DebugOut.AppendText("\r\n\tUser Ratings: " + reviewScores.Count);
    }

    // Reset the GO button so another run can start.
    running = false;
    GObtn.Text = "GO";
}
/// <summary>
/// Returns the window handle for the given view cookie via the GetHWnd
/// delegate, or IntPtr.Zero when no delegate is hooked up.
/// </summary>
/// <param name="i_viewCookie">View cookie (unused by this implementation).</param>
/// <param name="o_hwnd">Receives the resolved window handle, or IntPtr.Zero.</param>
public void GetWnd(int i_viewCookie, out IntPtr o_hwnd)
{
#if TSF_DEBUG_OUTPUT
    using(var dbgout = new DebugOut("{0}()", DebugOut.GetCaller()) )
#endif
    {
        // Delegate hooked up? Ask it for the HWND; otherwise report "no window".
        o_hwnd = (GetHWnd != null) ? GetHWnd() : IntPtr.Zero;
    }
}
/// <summary>
/// Runs snapshot processing (with or without per-particle array output,
/// per <c>writeArrays</c>): a single snapshot when <c>snapNumber</c> >= 0,
/// otherwise every snapshot in [firstSnap, lastSnap]. The isProcessing flag
/// is checked before each snapshot so a cancellation is logged, not processed.
/// </summary>
public override void Run()
{
    if (snapNumber >= 0)
    {
        if (isProcessing)
        {
            if (writeArrays)
            {
                ProcessSnapshotWithArrays(snapNumber);
            }
            else
            {
                ProcessSnapshot(snapNumber);
            }
        }
        else
        {
            // Consistency fix: the single-snapshot path used to drop the
            // interruption message that the multi-snapshot loop below (and
            // the sibling FOF/FFT Run overrides) emit.
            DebugOut.PrintLine("Processing snapshots Interrupted at snapshot " + snapNumber);
        }
    }
    else
    {
        for (Int16 i = firstSnap; i <= lastSnap; i++)
        {
            if (isProcessing)
            {
                if (writeArrays)
                {
                    ProcessSnapshotWithArrays(i);
                }
                else
                {
                    ProcessSnapshot(i);
                }
            }
            else
            {
                DebugOut.PrintLine("Processing snapshots Interrupted at snapshot " + i);
            }
        }
    }
}
/// <summary>
/// Converts the FOF (friends-of-friends) group files of one snapshot into a
/// single SQL-loadable binary file. Two passes over the per-task input files:
/// pass 1 reads every group_tab file to collect group lengths and offsets;
/// pass 2 reads every group_ids file to collect the particle IDs. The groups
/// are then written out with a per-group Bloom filter of member IDs.
/// </summary>
/// <param name="isnap">Snapshot number whose FOF files are processed.</param>
public void ProcessIndraFofs(int isnap)
{
    DebugOut.PrintLine("PROCESSING FOF Groups from Snapshot " + isnap);
    int skip = 0;                       // running write cursor into GroupLen/GroupOffset (pass 1) and IDs (pass 2)
    int filenumber = 0;                 // current per-task input file index
    string groupTabFile = "";
    string groupIDFile = "";
    int[] GroupLen = new int[0];        // particles per group, across all files
    int[] GroupOffset = new int[0];     // per-group start offset into the ID array
    long[] IDs = new long[0];           // concatenated member particle IDs
    bool done = false;
    int Ntask = 0;                      // number of input files, read from each file header
    int nGroups = 0;                    // groups in the current file
    int totNGroups = 0;                 // total groups in the snapshot (from header)
    int NIds = 0;                       // IDs in the current file
    FOFSummary summary = new FOFSummary();
    summary.start = DateTime.Now;

    // ---- Pass 1: group_tab files -> lengths and offsets -------------------
    while (!done)
    {
        groupTabFile = GetSimDBFOFFile(this.inPath, isnap, this.groupTabFilePrefix, filenumber);
        using (BinaryReader reader = new BinaryReader(new FileStream(groupTabFile, FileMode.Open, FileAccess.Read)))
        {
            // Header layout: nGroups, NIds, totNGroups, Ntask (four Int32s).
            nGroups = reader.ReadInt32();
            NIds = reader.ReadInt32();
            totNGroups = reader.ReadInt32();
            Ntask = reader.ReadInt32();
            if (filenumber == 0)
            {
                // Allocate once, sized from the first file's total-count header.
                GroupLen = new int[totNGroups];
                GroupOffset = new int[totNGroups];
            }
            if (nGroups > 0)
            {
                // Lengths first, then offsets, each nGroups entries long.
                for (int i = skip; i < skip + nGroups; i++)
                {
                    GroupLen[i] = reader.ReadInt32();
                }
                for (int i = skip; i < skip + nGroups; i++)
                {
                    GroupOffset[i] = reader.ReadInt32();
                }
                skip += nGroups;
            }
        }
        DebugOut.PrintLine("Ngroups: " + nGroups + " totNGroups: " + totNGroups + " NIds: " + NIds + " Ntask: " + Ntask);
        filenumber++;
        // Ntask (from the header) is the number of files to read.
        if (filenumber == Ntask)
        {
            done = true;
        }
    }

    // Reset the per-pass state before the second sweep.
    nGroups = 0;
    totNGroups = 0;
    NIds = 0;
    Ntask = 0;
    done = false;
    filenumber = 0;
    skip = 0;

    // ---- Pass 2: group_ids files -> concatenated member IDs ---------------
    while (!done)
    {
        groupIDFile = GetSimDBFOFFile(this.inPath, isnap, this.groupIDFilePrefix, filenumber);
        using (BinaryReader reader = new BinaryReader(new FileStream(groupIDFile, FileMode.Open, FileAccess.Read)))
        {
            // Same four-Int32 header layout as the group_tab files.
            nGroups = reader.ReadInt32();
            NIds = reader.ReadInt32();
            totNGroups = reader.ReadInt32();
            Ntask = reader.ReadInt32();
            // Total ID count = sum of all group lengths from pass 1.
            // NOTE(review): this sum is recomputed on every file iteration but
            // only used when filenumber == 0; harmless, just redundant work.
            int totNIds = 0;
            for (int i = 0; i < GroupLen.Length; i++)
            {
                totNIds += GroupLen[i];
            }
            if (filenumber == 0)
            {
                IDs = new long[totNIds];
            }
            if (NIds > 0)
            {
                for (int i = skip; i < skip + NIds; i++)
                {
                    IDs[i] = reader.ReadInt64();
                    // Keep only the low 34 bits — presumably the upper bits of
                    // the stored ID carry non-ID flag data; TODO confirm against
                    // the file-format spec.
                    IDs[i] &= ((((long)1) << 34) - 1);
                }
                skip += NIds;
            }
        }
        filenumber++;
        if (filenumber == Ntask)
        {
            done = true;
        }
    }

    // ---- Write the groups out, one record per non-empty group -------------
    // NOTE(review): "//" here produces a doubled path separator; most file
    // systems tolerate it, but GetSimDBFOFFile's convention should be matched.
    string outfilepath = this.outPath + "//groups_snap" + isnap;
    using (SqlBinaryWriter binwriter = new SqlBinaryWriter(new FileStream(outfilepath, FileMode.Create)))
    {
        for (int i = 0; i < totNGroups; i++)
        {
            if (GroupLen[i] > 0)
            {
                long[] curGroupIds = new long[GroupLen[i]];
                Array.Copy(IDs, GroupOffset[i], curGroupIds, 0, curGroupIds.Length);
                // Build a Bloom filter over the member IDs so lookups can skip
                // groups that cannot contain a given particle.
                BloomFilter.Filter<long> filter = new Filter<long>(LoaderConstants.expectedSize);
                for (int j = 0; j < curGroupIds.Length; j++)
                {
                    filter.Add(curGroupIds[j]);
                }
                binwriter.WriteFoFGroup(curGroupIds, (short)isnap, i, filter);
            }
        }
    }

    summary.end = DateTime.Now;
    summary.duration = summary.end - summary.start;
    summary.numGroups = totNGroups;
    globals.summary.fofSummary = summary;
    // Forced collection between snapshots; the ID arrays can be very large.
    GC.Collect();
    GC.WaitForPendingFinalizers();
    DebugOut.PrintLine("Completed writing snapshot: " + isnap);
}
/// <summary>
/// Reads one snapshot's FFT mode file (real and imaginary parts of the
/// density modes) and rewrites it as a SQL-loadable binary file.
/// </summary>
/// <param name="isnap">Snapshot number whose FFT file is processed.</param>
public void ProcessIndraFFTData(int isnap)
{
    // BUG FIX: this message previously said "FOF Groups" — a copy/paste from
    // ProcessIndraFofs; this routine handles FFT data.
    DebugOut.PrintLine("PROCESSING FFT Data from Snapshot " + isnap);
    int L = 128; // TODO magic number: assumes there will always be 128 modes
    int lhalf = L / 2;
    // Half-complex layout: the z axis only needs lhalf+1 entries.
    // (Cleaned up the original "lhalf + +1" typo; same value.)
    float[, ,] fft_re = new float[lhalf + 1, L + 1, L + 1];
    float[, ,] fft_im = new float[lhalf + 1, L + 1, L + 1];
    int nsize = 0;
    double time2 = 0.0;

    string fftfile = GetIndraFFTFILE(inPath, isnap, filePrefix, fileExtension);
    DebugOut.PrintLine(fftfile);
    using (BinaryReader reader = new BinaryReader(new FileStream(fftfile, FileMode.Open, FileAccess.Read)))
    {
        // Header: a double timestamp followed by an Int32 size.
        time2 = reader.ReadDouble();
        nsize = reader.ReadInt32();
        // Real parts first, then imaginary parts, both in (y, x, z) file order
        // stored into [z, x, y] arrays.
        for (int y = 0; y <= L; y++)
        {
            for (int x = 0; x <= L; x++)
            {
                for (int z = 0; z <= lhalf; z++)
                {
                    fft_re[z, x, y] = reader.ReadSingle();
                }
            }
        }
        for (int y = 0; y <= L; y++)
        {
            for (int x = 0; x <= L; x++)
            {
                for (int z = 0; z <= lhalf; z++)
                {
                    fft_im[z, x, y] = reader.ReadSingle();
                }
            }
        }
    }

    string outfilepath = outPath + "\\" + "trans_" + filePrefix + isnap;
    using (SqlBinaryWriter binwriter = new SqlBinaryWriter(new FileStream(outfilepath, FileMode.Create)))
    {
        binwriter.WriteFFTModes((short)isnap, time2, nsize, fft_re, fft_im);
    }

    // Forced collection between snapshots; the mode arrays are ~17 MB each.
    GC.Collect();
    GC.WaitForPendingFinalizers();
    DebugOut.PrintLine("Completed writing snapshot: " + isnap);
}
/// <summary>
/// Processes one snapshot without per-cell array output: each input subfile's
/// particles are loaded, tagged with snapshot number / high ID bit / PH key,
/// sorted by the particle comparator, and written to a plain binary file.
/// </summary>
/// <param name="snap">Snapshot number to process.</param>
private void ProcessSnapshot(short snap)
{
    DebugOut.PrintLine("PROCESSING SNAPSHOT WITHOUT ARRAYS " + snap);
    // Per-file summaries, collected in processing order.
    List<SnapFileSummary> results = new List<SnapFileSummary>();

    // Make the output directory, just to be safe.
    try
    {
        Directory.CreateDirectory(outPath);
    }
    catch (IOException e)
    {
        System.Console.WriteLine(e.Message);
    }

    int curFile = firstSnapshotFile; // index of current read/write file
    int numFile = 0;
    if (lastSnapshotFile < firstSnapshotFile)
    {
        // Sentinel state: the subfile count is unknown until the first
        // file's header has been read.
        lastSnapshotFile = firstSnapshotFile;
        numFile = -1;
    }

    while (curFile <= lastSnapshotFile)
    {
        Console.WriteLine("..file " + curFile + "/" + lastSnapshotFile);
        SnapFileSummary currentFileSummary = new SnapFileSummary();
        results.Add(currentFileSummary);
        currentFileSummary.start = DateTime.Now;
        string filename = "";
        try
        {
            filename = GetSnapFile(inPath, snap, snapshotFilePrefix, curFile);
            currentFileSummary.inFileName = filename;
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            // Consistency fix: record the partial summaries before bailing
            // out, as ProcessSnapshotWithArrays already does on this path.
            globals.summary.setFileSummaries(results);
            return;
        }

        // Load the subfile, then stream the sorted particles back out.
        SnapFile curSnap = new SnapFile(filename);
        using (BinaryWriter binwriter = new BinaryWriter(new FileStream(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), FileMode.Create)))
        {
            currentFileSummary.outFileName = GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile);
            Structs[] parts = new Structs[curSnap.numSample];
            for (int i = 0; i < curSnap.numSample; i++)
            {
                parts[i].x = curSnap.pos[i, 0];
                parts[i].y = curSnap.pos[i, 1];
                parts[i].z = curSnap.pos[i, 2];
                parts[i].vx = curSnap.vel[i, 0];
                parts[i].vy = curSnap.vel[i, 1];
                parts[i].vz = curSnap.vel[i, 2];
                parts[i].snapnum = snap;
                parts[i].id = curSnap.id[i];
                // Fold the highest-order ID bit (kept separately in the
                // header's nLargeSims[1]) back into the 64-bit particle ID.
                parts[i].id |= ((UInt64)curSnap.nLargeSims[1]) << 32;
                // PH key is the primary sort key for spatial locality.
                parts[i].phkey = GetPHKey(parts[i].x, parts[i].y, parts[i].z);
            }

            // Sort before writing so output files are in comparator order.
            Array.Sort<Structs>(parts, new ParticleComparator());
            for (int i = 0; i < curSnap.numSample; i++)
            {
                parts[i].WriteBinaryNoHsml(binwriter);
            }

            // Register a bulk-insert command for the file just written.
            DebugOut.AddCommand(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), snapshotTable);
            DebugOut.PrintLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
        }

        // First iteration only: learn the real file count from the header.
        // NOTE(review): the "+ 1" differs from ProcessSnapshotWithArrays'
        // equivalent; confirm which off-by-one is intended before unifying.
        if (numFile == -1)
        {
            numFile = (int)curSnap.numSubfiles - firstSnapshotFile + 1;
            lastSnapshotFile = numFile - 1;
        }

        currentFileSummary.end = DateTime.Now;
        // BUG FIX: was "results[curFile].start - results[curFile].end", which
        // (a) yielded a negated duration and (b) indexed results by file
        // number rather than list position (wrong when firstSnapshotFile > 0).
        currentFileSummary.duration = currentFileSummary.end - currentFileSummary.start;
        currentFileSummary.numParticles = curSnap.numSample;
        curFile++;

        // Forced collection between subfiles to avoid OutOfMemory on big runs.
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
    globals.summary.setFileSummaries(results);
}
/// <summary>
/// Processes one snapshot with per-cell array output: each input subfile's
/// particles are loaded, tagged with snapshot number / high ID bit / PH key,
/// sorted, grouped into Cells by PH key, and written via SqlBinaryWriter.
/// Per-file summaries (including warnings on suspicious particle counts)
/// are recorded in the global summary.
/// </summary>
/// <param name="snap">Snapshot number to process.</param>
private void ProcessSnapshotWithArrays(short snap)
{
    // Per-file summaries, collected in processing order.
    List<SnapFileSummary> results = new List<SnapFileSummary>();

    // Make the output directory, just to be safe.
    try
    {
        Directory.CreateDirectory(outPath);
    }
    catch (IOException e)
    {
        globals.summary.addError(e.Message);
        return;
    }

    int curFile = firstSnapshotFile; // index of current read/write file
    int numFile = 0;
    if (lastSnapshotFile < firstSnapshotFile)
    {
        // Sentinel state: the subfile count is unknown until the first
        // file's header has been read.
        lastSnapshotFile = firstSnapshotFile;
        numFile = -1;
    }

    while (curFile <= lastSnapshotFile)
    {
        Console.WriteLine("..file " + curFile + "/" + lastSnapshotFile);
        SnapFileSummary currentFileSummary = new SnapFileSummary();
        results.Add(currentFileSummary);
        currentFileSummary.start = DateTime.Now;
        currentFileSummary.fileNumber = curFile;
        string filename = "";
        try
        {
            filename = GetSnapFile(inPath, snap, snapshotFilePrefix, curFile);
            currentFileSummary.inFileName = filename;
        }
        catch (Exception e)
        {
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            globals.summary.setFileSummaries(results);
            return;
        }

        try
        {
            // Load the subfile, then write its particles grouped into cells.
            SnapFile curSnap = new SnapFile(filename);
            using (SqlBinaryWriter binwriter = new SqlBinaryWriter(new FileStream(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), FileMode.Create)))
            {
                Structs[] parts = new Structs[curSnap.numSample];
                // Record destination path in file summary.
                currentFileSummary.outFileName = GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile);
                for (int i = 0; i < curSnap.numSample; i++)
                {
                    parts[i].x = curSnap.pos[i, 0];
                    parts[i].y = curSnap.pos[i, 1];
                    parts[i].z = curSnap.pos[i, 2];
                    parts[i].vx = curSnap.vel[i, 0];
                    parts[i].vy = curSnap.vel[i, 1];
                    parts[i].vz = curSnap.vel[i, 2];
                    parts[i].snapnum = snap;
                    parts[i].id = curSnap.id[i];
                    // Fold the highest-order ID bit (kept separately in the
                    // header's nLargeSims[1]) back into the 64-bit particle ID.
                    parts[i].id |= ((UInt64)curSnap.nLargeSims[1]) << 32;
                    parts[i].phkey = GetPHKey(parts[i].x, parts[i].y, parts[i].z);
                }

                // Sort so particles with equal PH keys are contiguous.
                // TBD: may be unnecessary if files are already sorted.
                Array.Sort<Structs>(parts, new ParticleComparator());

                // Sweep the sorted particles, flushing a Cell each time the
                // PH key changes.
                Cell cell = new Cell(snap);
                int currentPHkey = -1;
                for (int i = 0; i < curSnap.numSample; i++)
                {
                    if (parts[i].phkey != currentPHkey)
                    {
                        if (cell.Count > 0)
                        {
                            binwriter.WriteCell(cell);
                        }
                        currentPHkey = parts[i].phkey;
                        cell.Init(currentPHkey);
                    }
                    cell.AddToCell(parts[i]);
                }
                if (cell.Count > 0)
                {
                    binwriter.WriteCell(cell); // flush the final cell
                }

                Console.WriteLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
                DebugOut.PrintLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
            }

            // First iteration only: learn the real file count from the header.
            if (numFile == -1)
            {
                numFile = (int)curSnap.numSubfiles - firstSnapshotFile;
                lastSnapshotFile = (int)curSnap.numSubfiles - 1;
            }

            currentFileSummary.end = DateTime.Now;
            // BUG FIX: was "results[curFile].end - results[curFile].start",
            // indexing results by file number rather than list position —
            // wrong (or out of range) whenever firstSnapshotFile > 0.
            currentFileSummary.duration = currentFileSummary.end - currentFileSummary.start;
            currentFileSummary.numParticles = curSnap.numSample;

            // Sanity warnings on implausible particle counts.
            if (curSnap.numSample < (int)(0.01 * LoaderConstants.particlesPerSnap))
            {
                currentFileSummary.warning = true;
                currentFileSummary.warningMessage = "Less than 1% of particles in snapshot in this file";
            }
            if (curSnap.numSample > LoaderConstants.particlesPerSnap)
            {
                currentFileSummary.warning = true;
                currentFileSummary.warningMessage = "More particles in file than are supposed to be in snapshot";
            }
            if (currentFileSummary.outFileName != null)
            {
                globals.summary.AddSnapBCPCommand(currentFileSummary.outFileName);
            }
            curFile++;

            // Forced collection between subfiles to avoid OutOfMemory errors.
            GC.Collect();
            GC.WaitForPendingFinalizers();
        }
        catch (Exception e)
        {
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            globals.summary.setFileSummaries(results);
            return;
        }
    }
    globals.summary.setFileSummaries(results);
}