public void CompressedAppendBug4()
{
    string fileName = GetBinFileName();
    if (!AllowCreate)
        return;

    using (var f = new BinCompressedSeriesFile<_CmplxIdx, _4Flds_ComplxIdx>(fileName))
    {
        f.UniqueIndexes = false;
        f.InitializeNewFile();
        f.AppendData(Data<_4Flds_ComplxIdx>(10, 20000));
        TestUtils.CollectionAssertEqual(
            Data<_4Flds_ComplxIdx>(10, 20000),
            f.Stream(new _CmplxIdx { a = 0 }, new _CmplxIdx { a = 50000 }),
            "#1");

        for (int ix = 0; ix < 5000; ix++)
            f.AppendData(Data<_4Flds_ComplxIdx>(20000 + 5*ix, 20000 + 5*(ix + 1)));
    }
}
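// A hedged sketch, not part of the original test: the same full-range check as "#1"
// could be repeated after the append loop. Assuming Data<T>(a, b) yields items with
// indexes in the half-open range [a, b), the loop above ends at index 44999, so the
// whole file could be verified with a helper like this:
private void AssertFullContents(BinCompressedSeriesFile<_CmplxIdx, _4Flds_ComplxIdx> f)
{
    TestUtils.CollectionAssertEqual(
        Data<_4Flds_ComplxIdx>(10, 45000),
        f.Stream(new _CmplxIdx { a = 0 }, new _CmplxIdx { a = 50000 }),
        "#2");
}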
public void Run()
{
    // Create filename, deleting the existing file if it exists
    string filename = CreateFilename();

    //
    // Set up sample data.
    // Note that we might get minor rounding errors when storing.
    // The Equals implementation of the Item accounts for that.
    //
    const int itemCount = 10000;
    IEnumerable<ArraySegment<Item>> data = Utils.GenerateData(
        0, itemCount, i => new Item(i));

    // Create a new BinCompressedSeriesFile that stores a sequence of Item structs.
    // The file is indexed by a long value inside Item marked with the [Index] attribute.
    // See the Item struct declaration.
    using (var bf = new BinCompressedSeriesFile<long, Item>(filename))
    {
        // Automatically pick the constructor that would set all the public fields in the struct
        var cmpxFld = (ComplexField) bf.RootField;
        cmpxFld.PopulateFields(ComplexField.Mode.Constructor | ComplexField.Mode.Fields);

        Console.WriteLine("Serialized Fields:\n {0}\n", string.Join(Environment.NewLine + " ", cmpxFld.Fields));
        Console.WriteLine("Deserialized with constructor:\n {0}\n", cmpxFld.Constructor);

        bf.InitializeNewFile(); // Finish new file initialization and create an empty file

        bf.AppendData(data);

        //
        // Verify that the created file has the right content (using the default bitwise value type Equals)
        //
        if (!bf.Stream().SequenceEqual(data.Stream()))
            throw new BinaryFileException("File does not have the right data");

        Console.WriteLine("File {0} created with {1,10:#,#} bytes", filename, bf.BaseStream.Length);
    }

    //
    // Check that the settings are stored correctly in the file and can be re-initialized on open
    //
    using (var bf1 = (IWritableFeed<long, Item>) BinaryFile.Open(filename))
    {
        if (!bf1.Stream().SequenceEqual(data.Stream()))
            throw new BinaryFileException("File does not have the right data on the second check");
    }

    // cleanup (CreateFilename() deletes the existing file)
    CreateFilename();
}
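// A minimal sketch of what the Item struct referenced above could look like. The actual
// declaration lives elsewhere in the samples, so the Value field and both constructor
// bodies are assumptions; the rounding-tolerant Equals mentioned in the comments is
// omitted. The key parts are the [Index] attribute on the long field and a public
// constructor setting every public field, which ComplexField.Mode.Constructor can pick:
public struct Item
{
    [Index] public readonly long SequenceNum; // index field required by BinCompressedSeriesFile<long, Item>
    public readonly double Value;

    // Constructor that sets every public field - the kind that
    // ComplexField.Mode.Constructor can select for deserialization
    public Item(long sequenceNum, double value)
    {
        SequenceNum = sequenceNum;
        Value = value;
    }

    // Convenience constructor matching the data generator usage above (hypothetical)
    public Item(long sequenceNum) : this(sequenceNum, sequenceNum/100.0) {}
}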
public void Run()
{
    // Create filename, deleting the existing file if it exists
    string filename = CreateFilename();

    //
    // Set up sample data.
    // Note that we might get minor rounding errors when storing.
    // The Equals implementation of the ReadonlyItemLngDbl accounts for that.
    //
    const int itemCount = 10000;
    IEnumerable<ArraySegment<ReadonlyItemLngDbl>> data = Utils.GenerateData(
        0, itemCount, i => new ReadonlyItemLngDbl(i, (i/100.0)*65.0));

    // Create a new BinCompressedSeriesFile that stores a sequence of ReadonlyItemLngDbl structs.
    // The file is indexed by a long value inside ReadonlyItemLngDbl marked with the [Index] attribute.
    // This demo configures serialization through the struct's custom root field
    // (ReadonlyItemLngDblField below) rather than through a field factory.
    // Subfield names are generated automatically, starting with "root" for the TVal, with each
    // subfield appended afterwards, separated by a dot.
    // Alternatively, ReadonlyItemLngDbl.SequenceNum can be marked with [Field(typeof(IncrementalIndex))].
    // For complex types, the [Field] attribute can also be set on the type itself.
    using (var bf = new BinCompressedSeriesFile<long, ReadonlyItemLngDbl>(filename))
    {
        // When a new instance of BinCompressedSeriesFile is created,
        // RootField will be pre-populated with default configuration objects.
        // Some fields, such as doubles, require additional configuration before the file can be initialized.
        var root = (ReadonlyItemLngDblField) bf.RootField;

        // Index is always increasing
        var seq = (ScaledDeltaIntField) root.SequenceNumField;
        seq.DeltaType = DeltaType.Positive;

        // This double will contain values with no more than 2 digits after the decimal point.
        // Before serializing, multiply the value by 100 to convert it to a long.
        var val1 = (ScaledDeltaFloatField) root.ValueField;
        val1.Multiplier = 100;

        bf.UniqueIndexes = true; // enforce index uniqueness - each index is +1
        bf.InitializeNewFile(); // Finish new file initialization and create an empty file

        bf.AppendData(data);

        //
        // Verify that the created file has the right content (using the default bitwise value type Equals)
        //
        if (!bf.Stream().SequenceEqual(data.Stream()))
            throw new BinaryFileException("File does not have the right data");

        Console.WriteLine("File {0} created with {1,10:#,#} bytes", filename, bf.BaseStream.Length);
    }

    //
    // Check that the settings are stored correctly in the file and can be re-initialized on open
    //
    using (var bf1 = (IWritableFeed<long, ReadonlyItemLngDbl>) BinaryFile.Open(filename))
    {
        if (!bf1.Stream().SequenceEqual(data.Stream()))
            throw new BinaryFileException("File does not have the right data on the second check");
    }

    // cleanup (CreateFilename() deletes the existing file)
    CreateFilename();
}
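// A hedged sketch of the ReadonlyItemLngDbl struct assumed by this demo (the real
// declaration, together with its custom ReadonlyItemLngDblField root field, lives
// elsewhere in the samples; the rounding-tolerant Equals is omitted). The field names
// match the SequenceNumField / ValueField accessors used above; the readonly fields
// presumably force deserialization to go through the constructor:
public struct ReadonlyItemLngDbl
{
    [Index] public readonly long SequenceNum;
    public readonly double Value;

    public ReadonlyItemLngDbl(long sequenceNum, double value)
    {
        SequenceNum = sequenceNum;
        Value = value;
    }
}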
public void Run()
{
    // Create filenames, deleting existing files if they exist
    string filename1 = CreateFilename(1);
    string filename2 = CreateFilename(2);
    string filename3 = CreateFilename(3);
    string filename4 = CreateFilename(4);

    var sw1 = new Stopwatch();
    var sw2 = new Stopwatch();
    var sw3 = new Stopwatch();
    var sw4 = new Stopwatch();

    //
    // Set up sample data so that the delta between the index's long values is 1,
    // and the delta between values is 0.65 => 65 with the multiplier, which is too big
    // to fit into a 7-bit signed integer, but fits into a 7-bit unsigned one.
    //
    const int itemCount = 500000;
    IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(
        0, itemCount, i => new ItemLngDbl(i, Math.Round((i/100.0)*65.0, 2)));

    // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDbl structs.
    // The file is indexed by a long value inside ItemLngDbl marked with the [Index] attribute.
    // Here we provide a custom field factory that will analyze each field as it is being created,
    // and may choose to supply a custom field, or null to use the default.
    // The name is automatically generated, starting with "root" for the TVal, with each
    // subfield appended afterwards, separated by a dot.
    // Alternatively, ItemLngDbl.SequenceNum can be marked with [Field(typeof(IncrementalIndex))]
    // (see the sketch after this method). For complex types, the [Field] attribute can also
    // be set on the type itself.
    using (var bf1 = new BinCompressedSeriesFile<long, ItemLngDbl>(
        filename1,
        fieldFactory:
            (store, type, name) =>
            type == typeof(long) && name == "root.SequenceNum"
                // For the long field named "SequenceNum" provide custom IncrementalIndex field serializer
                ? new IncrementalIndex(store, type, name)
                : null))
    using (var bf2 = new BinCompressedSeriesFile<long, ItemLngDbl>(filename2))
    using (var bf3 = new BinCompressedSeriesFile<long, ItemLngDbl>(filename3))
    using (var bf4 = new BinSeriesFile<long, ItemLngDbl>(filename4))
    {
        //
        // Configure bf1 to be the most compression-optimized:
        //   * use the custom incremental field serializer IncrementalIndex
        //   * use positive-only DeltaType for the value (it always increases in this test)
        //
        // When a new instance of BinCompressedSeriesFile is created,
        // RootField will be pre-populated with default configuration objects.
        // Some fields, such as doubles, require additional configuration before the file can be initialized.
        //
        var root1 = (ComplexField) bf1.RootField;

        // This double will contain values with no more than 2 digits after the decimal point.
        // Before serializing, multiply the value by 100 to convert it to a long.
        // The next value will always be the same as or larger than the previous one.
        var val1 = (ScaledDeltaFloatField) root1["Value"].Field;
        val1.Multiplier = 100;
        val1.DeltaType = DeltaType.Positive;
        bf1.UniqueIndexes = true; // enforce index uniqueness - each index is +1
        bf1.InitializeNewFile(); // Finish new file initialization and create an empty file

        //
        // Initialize bf2 the same as bf1, but without the custom serializer
        //
        var val2 = (ScaledDeltaFloatField) ((ComplexField) bf2.RootField)["Value"].Field;
        val2.Multiplier = 100;
        val2.DeltaType = DeltaType.Positive;
        bf2.UniqueIndexes = true;
        bf2.InitializeNewFile();

        //
        // Initialize bf3 in an identical fashion as bf2, but without the positive-only delta type
        //
        var val3 = (ScaledDeltaFloatField) ((ComplexField) bf3.RootField)["Value"].Field;
        val3.Multiplier = 100;
        bf3.UniqueIndexes = true;
        bf3.InitializeNewFile();

        //
        // Initialize the fourth, uncompressed file without any extra parameters
        //
        bf4.UniqueIndexes = true;
        bf4.InitializeNewFile();

        //
        // Append the same data to all files, measuring how long it takes.
        // Please note that the timing is not very accurate here, and will give different results depending on the order.
        //
        sw4.Start();
        bf4.AppendData(data);
        sw4.Stop();

        sw3.Start();
        bf3.AppendData(data);
        sw3.Stop();

        sw2.Start();
        bf2.AppendData(data);
        sw2.Stop();

        sw1.Start();
        bf1.AppendData(data);
        sw1.Stop();

        //
        // Verify that the created files are identical (use the default bitwise value type Equals)
        //
        if (!bf1.Stream().SequenceEqual(bf2.Stream()))
            throw new BinaryFileException("File #1 != #2");
        if (!bf1.Stream().SequenceEqual(bf3.Stream()))
            throw new BinaryFileException("File #1 != #3");
        if (!bf1.Stream().SequenceEqual(bf4.Stream()))
            throw new BinaryFileException("File #1 != #4");

        //
        // Print file sizes to see if there was any benefit
        //
        Console.WriteLine("Finished creating files with {0:#,#} items:\n", itemCount);
        Console.WriteLine(
            "{2,40}: {0,10:#,#} bytes in {1}", bf1.BaseStream.Length, sw1.Elapsed,
            "DeltaType.Positive and Calculated index");
        Console.WriteLine(
            "{2,40}: {0,10:#,#} bytes in {1}", bf2.BaseStream.Length, sw2.Elapsed, "DeltaType.Positive");
        Console.WriteLine(
            "{2,40}: {0,10:#,#} bytes in {1}", bf3.BaseStream.Length, sw3.Elapsed, "No optimizations");
        Console.WriteLine("{2,40}: {0,10:#,#} bytes in {1}", bf4.BaseStream.Length, sw4.Elapsed, "Uncompressed");
        Console.WriteLine();
    }

    //
    // Check that the settings are stored correctly in the file and can be re-initialized on open
    //
    using (var bf1 = (IWritableFeed<long, ItemLngDbl>) BinaryFile.Open(filename1))
    using (var bf2 = (IWritableFeed<long, ItemLngDbl>) BinaryFile.Open(filename2))
    {
        if (!bf1.Stream().SequenceEqual(bf2.Stream()))
            throw new BinaryFileException("File #1 != #2");
    }

    // cleanup (CreateFilename(n) deletes the existing file)
    CreateFilename(1);
    CreateFilename(2);
    CreateFilename(3);
    CreateFilename(4);
}
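// The comments above mention an alternative to the fieldFactory parameter: marking the
// index field with a [Field] attribute. A hedged sketch of what that could look like on
// the ItemLngDbl struct; the struct layout is an assumption, and only the attribute
// usage [Field(typeof(IncrementalIndex))] is taken from the comments:
public struct ItemLngDbl
{
    // With this attribute the custom serializer is chosen declaratively,
    // so no fieldFactory argument is needed when opening the file
    [Index, Field(typeof(IncrementalIndex))] public long SequenceNum;
    public double Value;

    public ItemLngDbl(long sequenceNum, double value)
    {
        SequenceNum = sequenceNum;
        Value = value;
    }
}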
public void Run()
{
    string filename = GetType().Name + ".bts";
    if (File.Exists(filename))
        File.Delete(filename);

    // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDbl structs.
    // The file is indexed by a long value inside ItemLngDbl marked with the [Index] attribute.
    using (var bf = new BinCompressedSeriesFile<long, ItemLngDbl>(filename))
    {
        //
        // Initialize new file parameters and create it
        //
        bf.UniqueIndexes = true; // enforce index uniqueness
        bf.Tag = "Sample Data"; // optionally provide a tag to store in the file header

        //
        // Configure value storage. This is the only difference from using BinSeriesFile.
        //
        // When a new instance of BinCompressedSeriesFile is created,
        // RootField will be pre-populated with default configuration objects.
        // Some fields, such as doubles, require additional configuration before the file can be initialized.
        //
        var root = (ComplexField) bf.RootField;

        // This double will contain values with no more than 2 digits after the decimal point.
        // Before serializing, multiply the value by 100 to convert it to a long.
        ((ScaledDeltaFloatField) root["Value"].Field).Multiplier = 100;

        bf.InitializeNewFile(); // Finish new file initialization and create an empty file

        //
        // Set up data generator to generate 10 items starting with index 3
        //
        IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(3, 10, i => new ItemLngDbl(i, i/100.0));

        //
        // Append data to the file
        //
        bf.AppendData(data);

        //
        // Read all data and print it using Stream() - one value at a time.
        // This method is slower than StreamSegments(), but easier to use for simple one-value iteration.
        //
        Console.WriteLine(" ** Content of file {0} after the first append", filename);
        Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
        foreach (ItemLngDbl val in bf.Stream())
            Console.WriteLine(val);
    }

    // Re-open the file, allowing data modifications.
    // The IWritableFeed<,> interface is better, as it will work with non-compressed files as well.
    using (var bf = (IWritableFeed<long, ItemLngDbl>) BinaryFile.Open(filename, true))
    {
        // Append a few more items with a different ItemLngDbl.Value to tell them apart
        IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(10, 10, i => new ItemLngDbl(i, i/25.0));

        // New data indexes will overlap with the existing ones, so allow truncating old data
        bf.AppendData(data, true);

        // Print values
        Console.WriteLine("\n ** Content of file {0} after the second append", filename);
        Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
        foreach (ItemLngDbl val in bf.Stream())
            Console.WriteLine(val);
    }

    // Re-open the file for reading only (a file can be opened for reading in parallel, but only one writer).
    // The IEnumerableFeed<,> interface is better, as it will work with non-compressed files as well.
    using (var bf = (IEnumerableFeed<long, ItemLngDbl>) BinaryFile.Open(filename, false))
    {
        // Show the first item with index >= 5
        Console.WriteLine(
            "\nFirst item on or after index 5 is {0}\n",
            bf.Stream(5, maxItemCount: 1).First());

        // Show the last item with index < 7 (iterate backwards)
        Console.WriteLine(
            "Last item before index 7 is {0}\n",
            bf.Stream(7, inReverse: true, maxItemCount: 1).First());

        // Average of values for indexes >= 4 and < 8
        Console.WriteLine(
            "Average of values for indexes >= 4 and < 8 is {0}\n",
            bf.Stream(4, 8).Average(i => i.Value));

        // Sum of the first 3 values with index less than 18, going backwards
        Console.WriteLine(
            "Sum of the first 3 values with index less than 18 and going backwards is {0}\n",
            bf.Stream(18, maxItemCount: 3, inReverse: true).Sum(i => i.Value));
    }

    // cleanup
    File.Delete(filename);
}
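// A hedged sketch, added for illustration: the comments above mention StreamSegments()
// as the faster, block-based alternative to Stream(). Assuming StreamSegments(fromInd)
// yields ArraySegment<ItemLngDbl> blocks starting at the given index (the exact
// signature is an assumption), printing all items one segment at a time could look
// like this:
static void PrintAllBySegments(IEnumerableFeed<long, ItemLngDbl> feed)
{
    foreach (ArraySegment<ItemLngDbl> seg in feed.StreamSegments(long.MinValue))
    {
        // Each segment exposes a shared buffer; honor Offset and Count
        for (int i = seg.Offset; i < seg.Offset + seg.Count; i++)
            Console.WriteLine(seg.Array[i]);
    }
}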
public void Run()
{
    string filename1 = GetType().Name + "1.bts";
    if (File.Exists(filename1))
        File.Delete(filename1);
    string filename2 = GetType().Name + "2.bts";
    if (File.Exists(filename2))
        File.Delete(filename2);
    string filename3 = GetType().Name + "3.bts";
    if (File.Exists(filename3))
        File.Delete(filename3);

    // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDblDbl structs.
    // The file is indexed by a long value inside ItemLngDblDbl marked with the [Index] attribute.
    // For comparison's sake, also create identical files: a compressed one without state linking,
    // and an uncompressed one.
    using (var bf1 = new BinCompressedSeriesFile<long, ItemLngDblDbl>(filename1))
    using (var bf2 = new BinCompressedSeriesFile<long, ItemLngDblDbl>(filename2))
    using (var bf3 = new BinSeriesFile<long, ItemLngDblDbl>(filename3))
    {
        //
        // Configure value storage. This is the only difference from using BinSeriesFile.
        //
        // When a new instance of BinCompressedSeriesFile is created,
        // RootField will be pre-populated with default configuration objects.
        // Some fields, such as doubles, require additional configuration before the file can be initialized.
        //
        var root = (ComplexField) bf1.RootField;
        var fld1 = (ScaledDeltaFloatField) root["Value1"].Field;
        var fld2 = (ScaledDeltaFloatField) root["Value2"].Field;

        // These doubles will contain values with no more than 2 digits after the decimal point.
        // Before serializing, multiply each value by 100 to convert it to a long.
        fld1.Multiplier = 100;
        fld2.Multiplier = 100;

        // ** IMPORTANT: Set the second field's state name the same as the first field's, linking them together
        fld2.StateName = fld1.StateName;

        bf1.InitializeNewFile(); // Finish new file initialization and create an empty file

        //
        // Set up data generator to generate items with closely related value1 and value2
        //
        IEnumerable<ArraySegment<ItemLngDblDbl>> data =
            Utils.GenerateData(1, 10000, i => new ItemLngDblDbl(i, i*10, i*10 + Math.Round(1/(1.0 + i%100), 2)));

        //
        // Append data to the file
        //
        bf1.AppendData(data);

        //
        // Initialize the second file in an identical fashion, without linking the states, and append the same data
        //
        var root2 = (ComplexField) bf2.RootField;
        ((ScaledDeltaFloatField) root2["Value1"].Field).Multiplier = 100;
        ((ScaledDeltaFloatField) root2["Value2"].Field).Multiplier = 100;
        bf2.InitializeNewFile();
        bf2.AppendData(data);

        //
        // Initialize the third, uncompressed file and append the same data
        //
        bf3.InitializeNewFile();
        bf3.AppendData(data);

        //
        // Print file sizes to see if there was any benefit
        //
        Console.WriteLine("      Shared: {0,10:#,#} bytes", bf1.BaseStream.Length);
        Console.WriteLine("   NonShared: {0,10:#,#} bytes", bf2.BaseStream.Length);
        Console.WriteLine("Uncompressed: {0,10:#,#} bytes", bf3.BaseStream.Length);
        Console.WriteLine();

        if (!bf1.Stream().SequenceEqual(bf2.Stream()))
            throw new BinaryFileException("File #1 != #2");
        if (!bf1.Stream().SequenceEqual(bf3.Stream()))
            throw new BinaryFileException("File #1 != #3");
    }

    //
    // Check that the settings are stored correctly in the file and can be re-initialized on open
    //
    using (var bf1 = (IWritableFeed<long, ItemLngDblDbl>) BinaryFile.Open(filename1))
    using (var bf2 = (IWritableFeed<long, ItemLngDblDbl>) BinaryFile.Open(filename2))
    {
        if (!bf1.Stream().SequenceEqual(bf2.Stream()))
            throw new BinaryFileException("File #1 != #2");
    }

    // cleanup
    File.Delete(filename1);
    File.Delete(filename2);
    File.Delete(filename3);
}
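// Why state linking helps here: in the generated data Value2 tracks Value1 closely
// (they differ by less than 1), so when both fields share one compression state,
// Value2 is stored as a tiny delta from the value just written for Value1 instead of
// a delta from Value2's own previous value. A hedged sketch of the ItemLngDblDbl
// struct assumed by this demo (field names are taken from the root["Value1"] /
// root["Value2"] lookups above; the layout is an assumption):
public struct ItemLngDblDbl
{
    [Index] public long SequenceNum;
    public double Value1;
    public double Value2;

    public ItemLngDblDbl(long sequenceNum, double value1, double value2)
    {
        SequenceNum = sequenceNum;
        Value1 = value1;
        Value2 = value2;
    }
}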