/// <summary>
/// Serializes all entries to <paramref name="filename"/> in this layout:
/// a 32-byte header (8 uints), the entry data blocks, a table of
/// (data offset, name offset) pairs, then the NUL-terminated entry names.
/// Multi-byte values are written big-endian via BEBinaryWriter.
/// </summary>
/// <param name="filename">Destination path; any existing file is overwritten (FileMode.Create).</param>
public void save(string filename)
{
    // using guarantees both handles are released even if a write throws
    // (previously they leaked on any exception mid-save).
    using (FileStream stream = new FileStream(filename, FileMode.Create))
    using (BEBinaryWriter writer = new BEBinaryWriter(stream, Encoding.UTF8))
    {
        // First pass: compute total file size and the size of the data region
        // (which is where the table begins, relative to the end of the header).
        // Name sizes must be UTF-8 BYTE counts, not char counts: the writer
        // encodes names as UTF-8, so Key.Length undercounts non-ASCII names.
        uint file_size = HEADER_SIZE;
        uint table_offset = 0;
        foreach (KeyValuePair<string, T> p in m_entries)
        {
            uint data_size = p.Value.get_size();
            uint name_size = (uint)Encoding.UTF8.GetByteCount(p.Key) + 1; // +1 for NUL terminator
            table_offset += data_size;
            file_size += data_size + ENTRY_SIZE + name_size;
        }

        // Write header data (five trailing uints are reserved/padding).
        writer.Write(file_size);
        writer.Write(table_offset);
        writer.Write((uint)0);
        writer.Write((uint)m_entries.Count);
        writer.Write((uint)0);
        writer.Write((uint)0);
        writer.Write((uint)0);
        writer.Write((uint)0);

        // Second pass: write each entry's data and record its offset relative
        // to the end of the header. An order-preserving list (rather than a
        // re-sorted dictionary) guarantees the table and string sections are
        // emitted in exactly the same order as the data blocks.
        List<KeyValuePair<string, uint>> entry_offsets = new List<KeyValuePair<string, uint>>();
        foreach (KeyValuePair<string, T> p in m_entries)
        {
            entry_offsets.Add(new KeyValuePair<string, uint>(p.Key, (uint)stream.Position - HEADER_SIZE));
            p.Value.save(writer);
        }

        // Write table data: one (data offset, name offset) pair per entry.
        // Name offsets are relative to the start of the string section.
        uint current_string_offset = 0;
        foreach (KeyValuePair<string, uint> p in entry_offsets)
        {
            writer.Write(p.Value);
            writer.Write(current_string_offset);
            current_string_offset += (uint)Encoding.UTF8.GetByteCount(p.Key) + 1;
        }

        // Write string data: NUL-terminated UTF-8 names. Writing the encoded
        // bytes directly keeps the emitted length consistent with the byte
        // counts used for the offsets above.
        foreach (KeyValuePair<string, uint> p in entry_offsets)
        {
            writer.Write(Encoding.UTF8.GetBytes(p.Key));
            writer.Write((byte)0);
        }
    }
}