/// <summary>
/// Converts one snapshot (subfiles firstSnapshotFile..lastSnapshotFile) into binary
/// bulk-load files: one particle file per subfile plus a "rev_index_" reverse-index
/// file. Per-file progress, warnings and errors are recorded in SnapFileSummary
/// entries pushed into globals.summary before every return path.
/// </summary>
/// <param name="snap">Snapshot number; stamped into every particle and cell.</param>
private void ProcessSnapshotWithArrays(short snap)
{
    // Debug toggles: allow skipping either output stream when diagnosing load problems.
    bool writeParticlesBool = true;
    bool writeIndexBool = true;

    List<SnapFileSummary> results = new List<SnapFileSummary>();

    // and make the directory, just to be safe
    try
    {
        Directory.CreateDirectory(outPath);
    }
    catch (IOException e)
    {
        globals.summary.addError(e.Message);
        return;
    }

    int curFile = firstSnapshotFile;    // index of current read/write file
    int numFile = 0;
    // numFile == -1 means "file count unknown until the first subfile header is read".
    if (lastSnapshotFile < firstSnapshotFile)
    {
        lastSnapshotFile = firstSnapshotFile;
        numFile = -1;
    }

    while (curFile <= lastSnapshotFile)
    {
        Console.WriteLine("..file " + curFile + "/" + lastSnapshotFile);

        SnapFileSummary currentFileSummary = new SnapFileSummary();
        results.Add(currentFileSummary);
        currentFileSummary.start = DateTime.Now;
        currentFileSummary.fileNumber = curFile;

        string filename = "";
        try
        {
            filename = GetSnapFile(inPath, snap, snapshotFilePrefix, curFile);
            currentFileSummary.inFileName = filename;
        }
        catch (Exception e)
        {
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            globals.summary.setFileSummaries(results);
            return;
        }

        // now load the file
        #region mainloop
        try
        {
            SnapFile curSnap = new SnapFile(filename);

            // Compute both destination paths once; reused for the summary below
            // (previously GetSnapDefault was called twice per path).
            string particleFileName = GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile);
            string indexFileName = GetSnapDefault(outPath, snap, "rev_index_", curFile);

            // and open the streams for writing
            using (SqlBinaryWriter particleBinWriter = new SqlBinaryWriter(new FileStream(particleFileName, FileMode.Create)),
                                   indexBinWriter = new SqlBinaryWriter(new FileStream(indexFileName, FileMode.Create)))
            {
                Structs[] parts = new Structs[curSnap.numSample];

                // record destination paths in file summary
                currentFileSummary.outFileName = particleFileName;
                currentFileSummary.indexFileName = indexFileName;

                // now write each particle into the array
                for (int i = 0; i < curSnap.numSample; i++)
                {
                    parts[i].x = curSnap.pos[i, 0];
                    parts[i].y = curSnap.pos[i, 1];
                    parts[i].z = curSnap.pos[i, 2];
                    parts[i].vx = curSnap.vel[i, 0];
                    parts[i].vy = curSnap.vel[i, 1];
                    parts[i].vz = curSnap.vel[i, 2];
                    parts[i].snapnum = snap;
                    parts[i].id = curSnap.id[i];
                    // add in highest-order bit
                    parts[i].id |= ((UInt64)curSnap.nLargeSims[1]) << 32;
                    // make ph-key
                    parts[i].phkey = GetPHKey(parts[i].x, parts[i].y, parts[i].z);
                }

                // Sort by ph-key so particles sharing a key are contiguous.
                // TBD: may be unnecessary if the input files are already sorted.
                Array.Sort<Structs>(parts, new ParticleComparator());

                // Sweep the sorted array, flushing a Cell (one row per WriteCell call)
                // every time the ph-key changes.
                Cell cell = new Cell(snap);
                int currentPHkey = -1;
                for (int i = 0; i < curSnap.numSample; i++)
                {
                    if (parts[i].phkey != currentPHkey)
                    {
                        if (cell.Count > 0)
                        {
                            if (writeParticlesBool)
                            {
                                particleBinWriter.WriteCell(cell);
                            }
                            if (writeIndexBool)
                            {
                                List<ReverseIndexEntry> index = cell.createIndex();
                                indexBinWriter.WriteReverseIndex(index);
                            }
                        }
                        currentPHkey = parts[i].phkey;
                        cell.Init(currentPHkey);
                    }
                    cell.AddToCell(parts[i]);
                }

                // Flush the final (partial) cell.
                if (cell.Count > 0)
                {
                    if (writeParticlesBool)
                    {
                        particleBinWriter.WriteCell(cell);
                    }
                    if (writeIndexBool)
                    {
                        List<ReverseIndexEntry> index = cell.createIndex();
                        indexBinWriter.WriteReverseIndex(index);
                    }
                }
            }

            // First file read: the header tells us how many subfiles the snapshot has.
            if (numFile == -1)
            {
                numFile = (int)curSnap.numSubfiles - firstSnapshotFile;
                lastSnapshotFile = (int)curSnap.numSubfiles - 1;
            }

            currentFileSummary.end = DateTime.Now;
            // BUG FIX: was results[curFile].end - results[curFile].start, which indexes
            // past the end of 'results' whenever firstSnapshotFile > 0 ('results' is
            // 0-based while curFile starts at firstSnapshotFile), throwing and falsely
            // marking a successfully-converted file as bad.
            currentFileSummary.duration = currentFileSummary.end - currentFileSummary.start;
            currentFileSummary.numParticles = curSnap.numSample;

            // Sanity checks on the particle count; both only warn, never abort.
            if (curSnap.numSample < (int)(0.01 * LoaderParamSingleton.getInstance().particlesPerSnap))
            {
                currentFileSummary.warning = true;
                currentFileSummary.warningMessage = "Less than 1% of particles in snapshot in this file";
            }
            if (curSnap.numSample > LoaderParamSingleton.getInstance().particlesPerSnap)
            {
                currentFileSummary.warning = true;
                currentFileSummary.warningMessage = "More particles in file than are supposed to be in snapshot";
            }

            if (currentFileSummary.outFileName != null)
            {
                globals.summary.AddSnapBCPCommand(currentFileSummary.outFileName);
                globals.summary.AddIndexBCPCommand(currentFileSummary.indexFileName);
            }

            curFile++;

            // avoid outofmemory errors
            GC.Collect();
            GC.WaitForPendingFinalizers();
        }
        catch (Exception e)
        {
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            globals.summary.setFileSummaries(results);
            return;
        }
        #endregion mainloop
    }

    globals.summary.setFileSummaries(results);
}
/// <summary>
/// Converts one snapshot into a binary bulk-load file per subfile, writing particles
/// row-by-row (no cell grouping). Optionally merges per-particle hsml, velocity
/// dispersion and density values read sequentially from a companion hsml file.
/// </summary>
/// <param name="snap">Snapshot number; stamped into every particle.</param>
private void ProcessSnapshot(short snap)
{
    DebugOut.PrintLine("PROCESSING SNAPSHOT " + snap);
    // using this only if needed (replaced below when useHsml is set)
    HsmlReader hsmlReader = new HsmlReader();

    // and make the directory, just to be safe
    try
    {
        Directory.CreateDirectory(GetSnapDir(outPath,snap));
    }
    catch (IOException e)
    {
        System.Console.WriteLine(e.Message);
    }

    int curFile = 0;  // index of current read/write file
    int numFiles = 1; // total number of files
    // snapshotFile >= 0 selects single-file mode: start at that subfile.
    if (snapshotFile >= 0)
    {
        curFile = snapshotFile;
        numFiles = snapshotFile+1;
    }
    if (useHsml)
        hsmlReader = new HsmlReader(GetHsmlPath(inPath, snap));

    while (curFile < numFiles)
    {
        DebugOut.PrintLine("..file " + curFile + "/" + numFiles);
        string filename = "";
        try
        {
            filename = GetSnapFile(inPath, snap, curFile);
        }
        catch (Exception e)
        {
            MessageBox.Show(e.Message);
            return;
        }

        // now load the file
        SnapFile curSnap = new SnapFile(filename, samplingRate);
        // and open the stream for writing
        using (SqlBinaryWriter binwriter = new SqlBinaryWriter(new FileStream(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), FileMode.Create)))
        {
            Structs[] parts = new Structs[curSnap.numSample];
            // now write each particle into the array
            for (int i = 0; i < curSnap.numSample; i++)
            {
                parts[i].x = curSnap.pos[i, 0];
                parts[i].y = curSnap.pos[i, 1];
                parts[i].z = curSnap.pos[i, 2];
                parts[i].vx = curSnap.vel[i, 0];
                parts[i].vy = curSnap.vel[i, 1];
                parts[i].vz = curSnap.vel[i, 2];
                parts[i].snapnum = snap;
                parts[i].id = curSnap.id[i];
                // add in highest-order bit
                parts[i].id |= ((UInt64)curSnap.nLargeSims[1]) << 32;
                // make ph-key
                parts[i].phkey = GetPHKey(parts[i].x, parts[i].y, parts[i].z);
                // read hsml, if desired; the reader is a sequential cursor, so it must
                // advance exactly once per particle.
                if (useHsml) // TODO fix this part for random sampling
                {
                    parts[i].hsml = hsmlReader.GetHsml();
                    parts[i].veldisp = hsmlReader.GetVelDisp();
                    parts[i].density = hsmlReader.GetDensity();
                    // and advance read pointer
                    hsmlReader.Next();
                }
            }

            // now sort before writing files
            Array.Sort<Structs>(parts, new ParticleComparator());

            // and then write output
            /* if (useHsml) for (int i = 0; i < curSnap.numSample; i++) parts[i].WriteBinary(binwriter); else for (int i = 0; i < curSnap.numSample; i++) parts[i].WriteBinaryNoHsml(binwriter); */
            for (int i = 0; i < curSnap.numSample; i++)
                binwriter.Write(parts[i]);

            // and add a bulk insert
            DebugOut.AddCommand(GetSnapDefault(outPath, snap, snapshotFilePrefix,curFile), snapshotTable);
            DebugOut.PrintLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
        }

        // and set numFiles from the subfile count in the file header
        // NOTE(review): this also overwrites numFiles in single-file mode
        // (snapshotFile >= 0), so the loop continues past the requested subfile when
        // numSubfiles > snapshotFile+1 — confirm whether that is intended.
        numFiles = (int)curSnap.numSubfiles;
        curFile++;

        // avoid outofmemory errors
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
}
/// <summary>
/// Loads the FOF (friends-of-friends) group catalog for one Indra snapshot from the
/// per-task group_tab and group_ids files, then writes every non-empty group as one
/// row of a binary bulk-load file. Timing and the group count are recorded in
/// globals.summary.fofSummary.
/// </summary>
/// <param name="isnap">Snapshot number.</param>
public void ProcessIndraFofs(int isnap)
{
    DebugOut.PrintLine("PROCESSING FOF Groups from Snapshot " + isnap);

    int skip = 0;
    int filenumber = 0;
    string groupTabFile = "";
    string groupIDFile = "";
    int[] GroupLen = new int[0];
    int[] GroupOffset = new int[0];
    long[] IDs = new long[0];
    bool done = false;
    int Ntask = 0;
    int nGroups = 0;
    int totNGroups = 0;
    int NIds = 0;

    FOFSummary summary = new FOFSummary();
    summary.start = DateTime.Now;

    // Pass 1: read every group_tab file. Each file starts with a 4-int header
    // (nGroups, NIds, totNGroups, Ntask) followed by per-group lengths and offsets.
    while (!done)
    {
        groupTabFile = GetSimDBFOFFile(this.inPath, isnap, this.groupTabFilePrefix, filenumber);
        using (BinaryReader reader = new BinaryReader(new FileStream(groupTabFile, FileMode.Open, FileAccess.Read)))
        {
            nGroups = reader.ReadInt32();
            NIds = reader.ReadInt32();
            totNGroups = reader.ReadInt32();
            Ntask = reader.ReadInt32();
            // The first header gives the total group count; allocate once.
            if (filenumber == 0)
            {
                GroupLen = new int[totNGroups];
                GroupOffset = new int[totNGroups];
            }
            if (nGroups > 0)
            {
                for (int i = skip; i < skip + nGroups; i++)
                {
                    GroupLen[i] = reader.ReadInt32();
                }
                for (int i = skip; i < skip + nGroups; i++)
                {
                    GroupOffset[i] = reader.ReadInt32();
                }
                skip += nGroups;
            }
        }
        DebugOut.PrintLine("Ngroups: " + nGroups + " totNGroups: " + totNGroups + " NIds: " + NIds + " Ntask: " + Ntask);
        filenumber++;
        // The header's Ntask field is the number of per-task files to read.
        if (filenumber == Ntask)
        {
            done = true;
        }
    }

    // Total particle-ID count across all groups. GroupLen is complete after pass 1,
    // so sum it once here (previously re-summed inside the loop for every ID file).
    int totNIds = 0;
    for (int i = 0; i < GroupLen.Length; i++)
    {
        totNIds += GroupLen[i];
    }
    IDs = new long[totNIds];

    // Pass 2: read every group_ids file into the flat IDs array.
    nGroups = 0;
    totNGroups = 0;
    NIds = 0;
    Ntask = 0;
    done = false;
    filenumber = 0;
    skip = 0;
    while (!done)
    {
        groupIDFile = GetSimDBFOFFile(this.inPath, isnap, this.groupIDFilePrefix, filenumber);
        using (BinaryReader reader = new BinaryReader(new FileStream(groupIDFile, FileMode.Open, FileAccess.Read)))
        {
            nGroups = reader.ReadInt32();
            NIds = reader.ReadInt32();
            totNGroups = reader.ReadInt32();
            Ntask = reader.ReadInt32();
            if (NIds > 0)
            {
                for (int i = skip; i < skip + NIds; i++)
                {
                    IDs[i] = reader.ReadInt64();
                    // Mask to the low 34 bits — presumably strips tag bits packed into
                    // the high bits of the raw ID; confirm against the simulation writer.
                    IDs[i] &= ((((long)1) << 34) - 1);
                }
                skip += NIds;
            }
        }
        filenumber++;
        if (filenumber == Ntask)
        {
            done = true;
        }
    }

    // Write one row per non-empty group.
    string outfilepath = this.outPath + "//groups_snap" + isnap;
    using (SqlBinaryWriter binwriter = new SqlBinaryWriter(
        new FileStream(outfilepath, FileMode.Create)))
    {
        for (int i = 0; i < totNGroups; i++)
        {
            if (GroupLen[i] > 0)
            {
                long[] curGroupIds = new long[GroupLen[i]];
                Array.Copy(IDs, GroupOffset[i], curGroupIds, 0, curGroupIds.Length);
                binwriter.WriteFoFGroup(curGroupIds, (short)isnap, i);
            }
        }
    }

    summary.end = DateTime.Now;
    summary.duration = summary.end - summary.start;
    summary.numGroups = totNGroups;
    globals.summary.fofSummary = summary;

    // avoid outofmemory errors
    GC.Collect();
    GC.WaitForPendingFinalizers();
    DebugOut.PrintLine("Completed writing snapshot: " + isnap);
}
/// <summary>
/// Reads one Indra FFT-mode file (header: double time, int nsize; then the real and
/// imaginary mode grids as floats) and rewrites it as a binary bulk-load file via
/// SqlBinaryWriter.WriteFFTModes.
/// </summary>
/// <param name="isnap">Snapshot number.</param>
public void ProcessIndraFFTData(int isnap)
{
    // Fixed log message: previously said "PROCESSING FOF Groups", copy-pasted from
    // ProcessIndraFofs.
    DebugOut.PrintLine("PROCESSING FFT data from Snapshot " + isnap);

    int L = 128; // TODO eventually, magic number; assume there will always be 128 modes?
    int lhalf = L / 2;
    // (was "lhalf + +1" — stray unary plus, same value, cleaned up)
    // Arrays are indexed [z, x, y] with z only covering 0..lhalf — presumably the
    // redundant half of a real-input FFT is omitted; confirm against the file writer.
    float[, ,] fft_re = new float[lhalf + 1, L + 1, L + 1];
    float[, ,] fft_im = new float[lhalf + 1, L + 1, L + 1];

    int nsize = 0;
    double time2 = 0.0;

    string fftfile = GetIndraFFTFILE(inPath, isnap, filePrefix, fileExtension);
    DebugOut.PrintLine(fftfile);

    using (BinaryReader reader = new BinaryReader(new FileStream(fftfile, FileMode.Open, FileAccess.Read)))
    {
        time2 = reader.ReadDouble();
        nsize = reader.ReadInt32();
        // All real parts first, then all imaginary parts, each in y / x / z order.
        for (int y = 0; y <= L; y++)
        {
            for (int x = 0; x <= L; x++)
            {
                for (int z = 0; z <= lhalf; z++)
                {
                    fft_re[z, x, y] = reader.ReadSingle();
                }
            }
        }
        for (int y = 0; y <= L; y++)
        {
            for (int x = 0; x <= L; x++)
            {
                for (int z = 0; z <= lhalf; z++)
                {
                    fft_im[z, x, y] = reader.ReadSingle();
                }
            }
        }
    }

    string outfilepath = outPath + "\\" + "trans_" + filePrefix + isnap;
    using (SqlBinaryWriter binwriter = new SqlBinaryWriter(
        new FileStream(outfilepath, FileMode.Create)))
    {
        binwriter.WriteFFTModes((short)isnap, time2, nsize, fft_re, fft_im);
    }

    // avoid outofmemory errors
    GC.Collect();
    GC.WaitForPendingFinalizers();
    DebugOut.PrintLine("Completed writing snapshot: " + isnap);
}
/// <summary>
/// Converts one snapshot (subfiles firstSnapshotFile..lastSnapshotFile) into binary
/// bulk-load files, grouping particles into cells by Peano-Hilbert key. Per-file
/// progress, warnings and errors are recorded in SnapFileSummary entries pushed into
/// globals.summary before every return path.
/// </summary>
/// <param name="snap">Snapshot number; stamped into every particle and cell.</param>
private void ProcessSnapshotWithArrays(short snap)
{
    List<SnapFileSummary> results = new List<SnapFileSummary>();

    // and make the directory, just to be safe
    try
    {
        Directory.CreateDirectory(outPath);
    }
    catch (IOException e)
    {
        globals.summary.addError(e.Message);
        return;
    }

    int curFile = firstSnapshotFile;    // index of current read/write file
    int numFile = 0;
    // numFile == -1 means "file count unknown until the first subfile header is read".
    if (lastSnapshotFile < firstSnapshotFile)
    {
        lastSnapshotFile = firstSnapshotFile;
        numFile = -1;
    }

    while (curFile <= lastSnapshotFile)
    {
        Console.WriteLine("..file " + curFile + "/" + lastSnapshotFile);

        SnapFileSummary currentFileSummary = new SnapFileSummary();
        results.Add(currentFileSummary);
        currentFileSummary.start = DateTime.Now;
        currentFileSummary.fileNumber = curFile;

        string filename = "";
        try
        {
            filename = GetSnapFile(inPath, snap, snapshotFilePrefix, curFile);
            currentFileSummary.inFileName = filename;
        }
        catch (Exception e)
        {
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            globals.summary.setFileSummaries(results);
            return;
        }

        // now load the file
        try
        {
            SnapFile curSnap = new SnapFile(filename);
            // and open the stream for writing
            using (SqlBinaryWriter binwriter = new SqlBinaryWriter(new FileStream(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), FileMode.Create)))
            {
                Structs[] parts = new Structs[curSnap.numSample];

                // record destination path in file summary
                currentFileSummary.outFileName = GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile);

                // now write each particle into the array
                for (int i = 0; i < curSnap.numSample; i++)
                {
                    parts[i].x = curSnap.pos[i, 0];
                    parts[i].y = curSnap.pos[i, 1];
                    parts[i].z = curSnap.pos[i, 2];
                    parts[i].vx = curSnap.vel[i, 0];
                    parts[i].vy = curSnap.vel[i, 1];
                    parts[i].vz = curSnap.vel[i, 2];
                    parts[i].snapnum = snap;
                    parts[i].id = curSnap.id[i];
                    // add in highest-order bit
                    parts[i].id |= ((UInt64)curSnap.nLargeSims[1]) << 32;
                    // make ph-key
                    parts[i].phkey = GetPHKey(parts[i].x, parts[i].y, parts[i].z);
                }

                // Sort by ph-key so particles sharing a key are contiguous.
                // TBD: may be unnecessary if the input files are already sorted.
                Array.Sort<Structs>(parts, new ParticleComparator());

                // Sweep the sorted array, flushing a Cell every time the ph-key changes.
                Cell cell = new Cell(snap);
                int currentPHkey = -1;
                for (int i = 0; i < curSnap.numSample; i++)
                {
                    if (parts[i].phkey != currentPHkey)
                    {
                        if (cell.Count > 0)
                            binwriter.WriteCell(cell);
                        currentPHkey = parts[i].phkey;
                        cell.Init(currentPHkey);
                    }
                    cell.AddToCell(parts[i]);
                }
                // Flush the final (partial) cell.
                if (cell.Count > 0)
                    binwriter.WriteCell(cell);

                Console.WriteLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
                DebugOut.PrintLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
            }

            // First file read: the header tells us how many subfiles the snapshot has.
            if (numFile == -1)
            {
                numFile = (int)curSnap.numSubfiles - firstSnapshotFile;
                lastSnapshotFile = (int)curSnap.numSubfiles - 1;
            }

            currentFileSummary.end = DateTime.Now;
            // BUG FIX: was results[curFile].end - results[curFile].start, which indexes
            // past the end of 'results' whenever firstSnapshotFile > 0 ('results' is
            // 0-based while curFile starts at firstSnapshotFile), throwing and falsely
            // marking a successfully-converted file as bad.
            currentFileSummary.duration = currentFileSummary.end - currentFileSummary.start;
            currentFileSummary.numParticles = curSnap.numSample;

            // Sanity checks on the particle count; both only warn, never abort.
            if (curSnap.numSample < (int)(0.01 * LoaderConstants.particlesPerSnap))
            {
                currentFileSummary.warning = true;
                currentFileSummary.warningMessage = "Less than 1% of particles in snapshot in this file";
            }
            if (curSnap.numSample > LoaderConstants.particlesPerSnap)
            {
                currentFileSummary.warning = true;
                currentFileSummary.warningMessage = "More particles in file than are supposed to be in snapshot";
            }

            if (currentFileSummary.outFileName != null)
            {
                globals.summary.AddSnapBCPCommand(currentFileSummary.outFileName);
            }

            curFile++;

            // avoid outofmemory errors
            GC.Collect();
            GC.WaitForPendingFinalizers();
        }
        catch (Exception e)
        {
            currentFileSummary.badStatus = true;
            currentFileSummary.statusMessage = e.Message;
            globals.summary.setFileSummaries(results);
            return;
        }
    }

    globals.summary.setFileSummaries(results);
}
/// <summary>
/// Converts one snapshot (subfiles firstSnapshotFile..lastSnapshotFile) into binary
/// bulk-load files, grouping particles into cells by Peano-Hilbert key. Optionally
/// merges per-particle hsml / velocity-dispersion / density values from a companion
/// hsml file.
/// </summary>
/// <param name="snap">Snapshot number; stamped into every particle and cell.</param>
private void ProcessSnapshotWithArrays(short snap)
{
    DebugOut.PrintLine("PROCESSING SNAPSHOT WITH ARRAYS" + snap);
    // using this only if needed (replaced below when useHsml is set)
    HsmlReader hsmlReader = new HsmlReader();

    // and make the directory, just to be safe
    try
    {
        Directory.CreateDirectory(GetSnapDir(outPath, snap));
    }
    catch (IOException e)
    {
        System.Console.WriteLine(e.Message);
    }

    int curFile = firstSnapshotFile;    // index of current read/write file
    int numFile = 0;
    if (useHsml)
        hsmlReader = new HsmlReader(GetHsmlPath(inPath, snap));
    // numFile == -1 means "file count unknown until the first subfile header is read".
    if (lastSnapshotFile < firstSnapshotFile)
    {
        lastSnapshotFile = firstSnapshotFile;
        numFile = -1;
    }

    while (curFile <= lastSnapshotFile)
    {
        DebugOut.PrintLine("..file " + curFile + "/" + lastSnapshotFile);
        string filename = "";
        try
        {
            filename = GetSnapFile(inPath, snap, snapshotFilePrefix, curFile);
        }
        catch (Exception e)
        {
            MessageBox.Show(e.Message);
            return;
        }

        // now load the file
        SnapFile curSnap = new SnapFile(filename, samplingRate);
        // and open the stream for writing
        using (SqlBinaryWriter binwriter = new SqlBinaryWriter(new FileStream(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), FileMode.Create)))
        {
            Structs[] parts = new Structs[curSnap.numSample];
            // now write each particle into the array
            for (int i = 0; i < curSnap.numSample; i++)
            {
                parts[i].x = curSnap.pos[i, 0];
                parts[i].y = curSnap.pos[i, 1];
                parts[i].z = curSnap.pos[i, 2];
                parts[i].vx = curSnap.vel[i, 0];
                parts[i].vy = curSnap.vel[i, 1];
                parts[i].vz = curSnap.vel[i, 2];
                parts[i].snapnum = snap;
                parts[i].id = curSnap.id[i];
                // add in highest-order bit
                parts[i].id |= ((UInt64)curSnap.nLargeSims[1]) << 32;
                // make ph-key
                parts[i].phkey = GetPHKey(parts[i].x, parts[i].y, parts[i].z);
                // read hsml, if desired
                if (useHsml) // TODO fix this part for random sampling
                {
                    parts[i].hsml = hsmlReader.GetHsml();
                    parts[i].veldisp = hsmlReader.GetVelDisp();
                    parts[i].density = hsmlReader.GetDensity();
                    // and advance read pointer
                    hsmlReader.Next();
                }
            }

            // now sort before writing files
            // TBD this may not be necessary if the particles are sorted in the files already.
            Array.Sort<Structs>(parts, new ParticleComparator());

            // Sweep the sorted array, flushing a Cell every time the ph-key changes.
            Cell cell = new Cell(snap);
            int currentPHkey = -1;
            for (int i = 0; i < curSnap.numSample; i++)
            {
                if (parts[i].phkey != currentPHkey)
                {
                    if (cell.Count > 0)
                        binwriter.Write(cell);
                    currentPHkey = parts[i].phkey;
                    cell.Init(currentPHkey);
                }
                cell.Add(parts[i]);
            }
            // Flush the final (partial) cell.
            if (cell.Count > 0)
                binwriter.Write(cell);

            // and add a bulk insert
            DebugOut.AddCommand(GetSnapDefault(outPath, snap, snapshotFilePrefix, curFile), snapshotTable);
            DebugOut.PrintLine("..wrote " + curSnap.numSample + "/" + curSnap.numTotals[1] + " points");
        }

        // and set numFiles
        if (numFile == -1)
        {
            // BUG FIX (off-by-one): was numFile = numSubfiles - firstSnapshotFile + 1
            // with lastSnapshotFile = numFile - 1, which made the loop run one subfile
            // past the end when firstSnapshotFile == 0 (subfiles are numbered
            // 0..numSubfiles-1) and truncated the range otherwise. Now consistent with
            // the other ProcessSnapshotWithArrays variants.
            numFile = (int)curSnap.numSubfiles - firstSnapshotFile;
            lastSnapshotFile = (int)curSnap.numSubfiles - 1;
        }
        curFile++;

        // avoid outofmemory errors
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
}
/// <summary>
/// Loads the FOF group catalog for one SimDB snapshot from the per-task group_tab and
/// group_ids files (32-bit particle IDs), then writes every non-empty group as one row
/// of a binary bulk-load file.
/// </summary>
/// <param name="isnap">Snapshot number.</param>
public void ProcessSimDBFofs(int isnap)
{
    DebugOut.PrintLine("PROCESSING FOF Groups from Snapshot " + isnap);

    // and make the directory, just to be safe
    try
    {
        Directory.CreateDirectory(GetSnapDir(this.outPath, isnap));
    }
    catch (IOException e)
    {
        System.Console.WriteLine(e.Message);
    }

    int skip = 0;
    int filenumber = 0;
    string groupTabFile = "";
    string groupIDFile = "";
    int[] GroupLen = new int[0];
    int[] GroupOffset = new int[0];
    int[] IDs = new int[0];
    bool done = false;
    int Ntask = 0;
    int nGroups = 0;
    int totNGroups = 0;
    int NIds = 0;

    // Pass 1: read every group_tab file. Each file starts with a 4-int header
    // (nGroups, NIds, totNGroups, Ntask) followed by per-group lengths and offsets.
    while (!done)
    {
        groupTabFile = GetSimDBFOFFile(this.inPath, isnap, this.groupTabFilePrefix, filenumber);
        using (BinaryReader reader = new BinaryReader(new FileStream(groupTabFile, FileMode.Open, FileAccess.Read)))
        {
            nGroups = reader.ReadInt32();
            NIds = reader.ReadInt32();
            totNGroups = reader.ReadInt32();
            Ntask = reader.ReadInt32();
            // The first header gives the total group count; allocate once.
            if (filenumber == 0)
            {
                GroupLen = new int[totNGroups];
                GroupOffset = new int[totNGroups];
            }
            if (nGroups > 0)
            {
                for (int i = skip; i < skip + nGroups; i++)
                {
                    GroupLen[i] = reader.ReadInt32();
                }
                for (int i = skip; i < skip + nGroups; i++)
                {
                    GroupOffset[i] = reader.ReadInt32();
                }
                skip += nGroups;
            }
        }
        filenumber++;
        // The header's Ntask field is the number of per-task files to read.
        if (filenumber == Ntask)
        {
            done = true;
        }
    }

    // Total particle-ID count across all groups. GroupLen is complete after pass 1,
    // so sum it once here (previously re-summed inside the loop for every ID file).
    // NOTE(review): an int sum could overflow for extremely large catalogs — same
    // caveat as the original "Will I get integer overflow here???" comment; verify
    // against expected catalog sizes.
    int totNIds = 0;
    for (int i = 0; i < GroupLen.Length; i++)
    {
        totNIds += GroupLen[i];
    }
    IDs = new int[totNIds];

    // Pass 2: read every group_ids file into the flat IDs array.
    nGroups = 0;
    totNGroups = 0;
    NIds = 0;
    Ntask = 0;
    done = false;
    filenumber = 0;
    skip = 0;
    while (!done)
    {
        groupIDFile = GetSimDBFOFFile(this.inPath, isnap, this.groupIDFilePrefix, filenumber);
        using (BinaryReader reader = new BinaryReader(new FileStream(groupIDFile, FileMode.Open, FileAccess.Read)))
        {
            nGroups = reader.ReadInt32();
            NIds = reader.ReadInt32();
            totNGroups = reader.ReadInt32();
            Ntask = reader.ReadInt32();
            if (NIds > 0)
            {
                for (int i = skip; i < skip + NIds; i++)
                {
                    IDs[i] = reader.ReadInt32();
                }
                skip += NIds;
            }
        }
        filenumber++;
        if (filenumber == Ntask)
        {
            done = true;
        }
    }

    // Write one row per non-empty group.
    string outfilepath = this.outPath + "//groups_snap" + isnap;
    using (SqlBinaryWriter binwriter = new SqlBinaryWriter(
        new FileStream(outfilepath, FileMode.Create)))
    {
        for (int i = 0; i < totNGroups; i++)
        {
            if (GroupLen[i] > 0)
            {
                int[] curGroupIds = new int[GroupLen[i]];
                Array.Copy(IDs, GroupOffset[i], curGroupIds, 0, curGroupIds.Length);
                binwriter.Write(curGroupIds, (short)isnap, i);
            }
        }
    }

    // avoid outofmemory errors
    GC.Collect();
    GC.WaitForPendingFinalizers();
    DebugOut.PrintLine("Completed writing snapshot: " + isnap);
}