private void DoBuild()
{
    // ********************************************************************
    // ************************ Adjust These Paths ************************
    // ********************************************************************
    allFragDatFile.CreateFile(Path.Combine(txtOutputFolder.Text, "All.frags"), chkCompressOutput.Checked ? FragDatListFile.CompressionType.DeflateStream : FragDatListFile.CompressionType.None);
    createObjectFragDatFile.CreateFile(Path.Combine(txtOutputFolder.Text, "CreateObject.frags"), chkCompressOutput.Checked ? FragDatListFile.CompressionType.DeflateStream : FragDatListFile.CompressionType.None);

    // Do not parallelize this search
    foreach (var currentFile in filesToProcess)
    {
        if (searchAborted || Disposing || IsDisposed)
            break;

        try
        {
            ProcessFileForBuild(currentFile);
        }
        catch (Exception ex)
        {
            MessageBox.Show("File failed to process with exception: " + Environment.NewLine + ex, "Exception", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }

    // ********************************************************************
    // ****************************** Cleanup *****************************
    // ********************************************************************
    allFragDatFile.CloseFile();
    createObjectFragDatFile.CloseFile();
}
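// Illustrative sketch only (not part of the original file): CompressionType.DeflateStream above
// suggests that, when chkCompressOutput is checked, the output is deflate-compressed. The helper
// below is an assumption of how such wrapping could look using System.IO.Compression; the actual
// FragDatListFile implementation may differ, and WrapOutputStream is a hypothetical name.
private static Stream WrapOutputStream(Stream baseStream, bool compress)
{
    // When compression is requested, wrap the raw file stream in a DeflateStream
    // so everything written downstream is deflate-compressed.
    if (compress)
        return new System.IO.Compression.DeflateStream(baseStream, System.IO.Compression.CompressionMode.Compress);

    return baseStream;
}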
private void ProcessFileForExamination(string fileName)
{
    var fragDatListFile = new FragDatListFile();

    if (!fragDatListFile.OpenFile(fileName))
        return;

    var itemTypesToParse = new List<ITEM_TYPE>();
    var itemTypeKeys = new Dictionary<ITEM_TYPE, List<string>>();
    var itemTypeStreamWriters = new Dictionary<ITEM_TYPE, StreamWriter>();

    // If you only want to output a single item_type, you can change this code
    foreach (ITEM_TYPE itemType in Enum.GetValues(typeof(ITEM_TYPE)))
    {
        itemTypesToParse.Add(itemType);
        itemTypeKeys[itemType] = new List<string>();
        itemTypeStreamWriters[itemType] = new StreamWriter(Path.Combine(txtOutputFolder.Text, itemType + ".csv.temp"));
    }

    try
    {
        TreeView treeView = new TreeView();

        while (true)
        {
            if (searchAborted || Disposing || IsDisposed)
                return;

            KeyValuePair<string, List<FragDatListFile.FragDatInfo>> kvp;

            if (!fragDatListFile.TryReadNext(out kvp))
                break;

            foreach (var frag in kvp.Value)
            {
                fragmentsProcessed++;

                try
                {
                    // ********************************************************************
                    // ********************** CUSTOM PROCESSING CODE **********************
                    // ********************************************************************
                    if (frag.Data.Length <= 4)
                        continue;

                    BinaryReader fragDataReader = new BinaryReader(new MemoryStream(frag.Data));

                    var messageCode = fragDataReader.ReadUInt32();

                    if (messageCode == 0xF745) // Create Object
                    {
                        var parsed = CM_Physics.CreateObject.read(fragDataReader);

                        if (!itemTypesToParse.Contains(parsed.wdesc._type))
                            continue;

                        totalHits++;

                        // This bit of trickery uses the existing tree view parser code to create readable output, which we can then convert to csv
                        treeView.Nodes.Clear();
                        parsed.contributeToTreeView(treeView);

                        if (treeView.Nodes.Count == 1)
                        {
                            var lineItems = new string[256];
                            int lineItemCount = 0;

                            ProcessNode(treeView.Nodes[0], itemTypeKeys[parsed.wdesc._type], null, lineItems, ref lineItemCount);

                            var sb = new StringBuilder();

                            for (int i = 0; i < lineItemCount; i++)
                            {
                                if (i > 0)
                                    sb.Append(',');

                                var output = lineItems[i];

                                // Format the value for CSV output, if needed.
                                // We only do this for certain columns. This is very time consuming
                                if (output != null && itemTypeKeys[parsed.wdesc._type][i].EndsWith("name"))
                                {
                                    if (output.Contains(",") || output.Contains("\"") || output.Contains("\r") || output.Contains("\n"))
                                    {
                                        var sb2 = new StringBuilder();
                                        sb2.Append("\"");

                                        foreach (char nextChar in output)
                                        {
                                            sb2.Append(nextChar);

                                            if (nextChar == '"')
                                                sb2.Append("\"");
                                        }

                                        sb2.Append("\"");
                                        output = sb2.ToString();
                                    }
                                }

                                if (output != null)
                                    sb.Append(output);
                            }

                            itemTypeStreamWriters[parsed.wdesc._type].WriteLine(sb.ToString());
                        }
                    }
                }
                catch (EndOfStreamException) // This can happen when a frag is incomplete and we try to parse it
                {
                    totalExceptions++;
                }
            }
        }
    }
    finally
    {
        foreach (var streamWriter in itemTypeStreamWriters.Values)
            streamWriter.Close();

        fragDatListFile.CloseFile();

        Interlocked.Increment(ref filesProcessed);
    }

    // Read in the temp file and save it to a new file with the column headers
    foreach (var kvp in itemTypeKeys)
    {
        if (kvp.Value.Count > 0)
        {
            using (var writer = new StreamWriter(Path.Combine(txtOutputFolder.Text, kvp.Key + ".csv")))
            {
                var sb = new StringBuilder();

                for (int i = 0; i < kvp.Value.Count; i++)
                {
                    if (i > 0)
                        sb.Append(',');

                    sb.Append(kvp.Value[i] ?? String.Empty);
                }

                writer.WriteLine(sb.ToString());

                using (var reader = new StreamReader(Path.Combine(txtOutputFolder.Text, kvp.Key + ".csv.temp")))
                {
                    string line;

                    while ((line = reader.ReadLine()) != null)
                        writer.WriteLine(line);
                }
            }
        }

        File.Delete(Path.Combine(txtOutputFolder.Text, kvp.Key + ".csv.temp"));
    }
}
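// Illustrative sketch only (not part of the original file): the inline quoting in
// ProcessFileForExamination follows standard CSV escaping (quote the field when it contains a
// delimiter, a quote, or a line break, and double any embedded quotes). A hypothetical helper
// performing the same transformation could look like this; EscapeCsvField is an assumed name.
private static string EscapeCsvField(string field)
{
    if (field == null)
        return String.Empty;

    // Only quote when the value contains a delimiter, a quote, or a line break,
    // matching the check used above.
    if (field.Contains(",") || field.Contains("\"") || field.Contains("\r") || field.Contains("\n"))
    {
        // Double any embedded quotes, then wrap the whole value in quotes.
        return "\"" + field.Replace("\"", "\"\"") + "\"";
    }

    return field;
}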