Code Example #1
File: LegacyTests.cs Project: zdomokos/timeseriesdb
        // Opens an existing file when creation is not allowed; otherwise creates and initializes a new one
        private BinSeriesFile<long, _LongByte_SeqPk1> OpenFile(string fileName)
        {
            if (!AllowCreate)
                return (BinSeriesFile<long, _LongByte_SeqPk1>) BinaryFile.Open(fileName, false, LegacyResolver);

            var r = new BinSeriesFile<long, _LongByte_SeqPk1>(fileName) {UniqueIndexes = false};
            r.InitializeNewFile();
            return r;
        }
Code Example #2
        private static IWritableFeed<TInd, T> CreateFile<TInd, T>(bool isUnique, string fn)
            where TInd : IComparable<TInd>
            where T : IEquatable<T>
        {
            var r = new BinSeriesFile<TInd, T>(fn)
            {
                UniqueIndexes = isUnique
            };

            r.InitializeNewFile();
            return r;
        }
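
A minimal usage sketch of the helper above (ItemLngDbl and Utils.GenerateData are borrowed from later examples on this page; that ItemLngDbl satisfies the IEquatable<T> constraint, and the file name "sample.bts", are assumptions of this sketch):

        // Hypothetical call site; assumes ItemLngDbl implements IEquatable<ItemLngDbl>
        using (IWritableFeed<long, ItemLngDbl> feed = CreateFile<long, ItemLngDbl>(true, "sample.bts"))
        {
            // AppendData consumes a sequence of ArraySegment<T> blocks
            feed.AppendData(Utils.GenerateData(0, 100, i => new ItemLngDbl(i, i / 10.0)));
        }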
Code Example #3
File: LegacyTests.cs Project: valmac/timeseriesdb
        public void TestLegacyReadSupportWithOffset()
        {
            using (BinSeriesFile<long, _LongByte_SeqPk1> f = OpenFile(GetBinFileName()))
            {
                if (AllowCreate)
                {
                    f.AppendData(Data<_LongByte_SeqPk1>(10, 20));
                    TestUtils.CollectionAssertEqual(Data<_LongByte_SeqPk1>(10, 20), f.Stream(0), "#1");
                }

                // Read into the middle of a larger buffer: the 11 items land at positions 10..20
                var buf = new ArraySegment<_LongByte_SeqPk1>(new _LongByte_SeqPk1[21], 10, 11);
                f.ReadData(0, buf);
                TestUtils.CollectionAssertEqual(Data<_LongByte_SeqPk1>(10, 20), buf.Stream(), "#2");
            }
        }
Code Example #4
File: LegacyTests.cs Project: valmac/timeseriesdb
        // Opens an existing file when creation is not allowed; otherwise creates and initializes a new one
        private BinSeriesFile<long, _LongByte_SeqPk1> OpenFile(string fileName)
        {
            if (!AllowCreate)
            {
                return (BinSeriesFile<long, _LongByte_SeqPk1>)BinaryFile.Open(fileName, false, LegacyResolver);
            }

            var r = new BinSeriesFile<long, _LongByte_SeqPk1>(fileName)
            {
                UniqueIndexes = false
            };

            r.InitializeNewFile();
            return r;
        }
Code Example #5
            /// <summary>
            ///   This method is called with TInd and TVal set to the actual types stored in the file
            /// </summary>
            public long Run<TInd, TVal>(IGenericInvoker2 source, string destinationFile)
                where TInd : IComparable<TInd>
            {
                // The source is the binary file object on which RunGenericMethod() was called
                var src = (IEnumerableFeed<TInd, TVal>)source;

                // Create a BinSeriesFile, as it is easier to set up than a compressed one
                using (var dst = new BinSeriesFile<TInd, TVal>(destinationFile))
                {
                    // Initialize the new file
                    dst.InitializeNewFile();

                    // Copy the entire content of the source file into the destination file
                    dst.AppendData(src.StreamSegments());

                    // Dump the content of the new file to the console
                    Console.WriteLine("Destination file\n{0}", Utils.DumpFeed(dst));

                    // Return the item count (not supported for compressed files)
                    return dst.Count;
                }
            }
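
A hypothetical invocation sketch for the callable above: the enclosing class (called GenericCopier here, an illustrative name) would implement IGenericCallable2<long, string>, and the opened file would dispatch to Run<TInd, TVal>() with the file's concrete types. Treat the RunGenericMethod call as an assumption about the library's double-dispatch API, not confirmed usage:

            // "source.bts" / "destination.bts" are illustrative file names
            using (BinaryFile src = BinaryFile.Open("source.bts", false))
            {
                // Assumed dispatch: resolves TInd/TVal to the types stored in the file,
                // then calls GenericCopier.Run<TInd, TVal>(src, "destination.bts")
                long count = src.RunGenericMethod(new GenericCopier(), "destination.bts");
                Console.WriteLine("Copied {0} items", count);
            }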
Code Example #6
        public void BasicFunctionality()
        {
            _DatetimeByte_SeqPk1[] newData = TestUtils.GenerateData<_DatetimeByte_SeqPk1>(10000, 0);

            string fileName = GetBinFileName();

            // Either create and initialize a new file, or open an existing one with the legacy type resolver
            using (BinSeriesFile<UtcDateTime, _DatetimeByte_SeqPk1> f =
                       AllowCreate
                           ? new BinSeriesFile<UtcDateTime, _DatetimeByte_SeqPk1>(fileName) {UniqueIndexes = false}
                           : (BinSeriesFile<UtcDateTime, _DatetimeByte_SeqPk1>)
                             BinaryFile.Open(fileName, false, LegacyResolver))
            {
                if (AllowCreate)
                {
                    f.InitializeNewFile();
                    f.AppendData(new ArraySegment<_DatetimeByte_SeqPk1>(newData));
                }

                _DatetimeByte_SeqPk1[] res = f.ReadData(UtcDateTime.MinValue, UtcDateTime.MaxValue, int.MaxValue);
                TestUtils.AreEqual(newData, res);

                //                if (itemCount > 0)
                //                {
                //                    res = f.ReadData(
                //                        _DatetimeByte_SeqPk1.New(itemCount - 1).a,
                //                        _DatetimeByte_SeqPk1.New(itemCount).a,
                //                        int.MaxValue);
                //                    TestUtils.AreEqual(
                //                        TestUtils.GenerateData<_DatetimeByte_SeqPk1>(
                //                            _DatetimeByte_SeqPk1.New, 1, itemCount - 1), res);
                //                }
            }
        }
Code Example #7
        public void Run()
        {
            // Create filenames, deleting existing files if they exist
            string filename1 = CreateFilename(1);
            string filename2 = CreateFilename(2);
            string filename3 = CreateFilename(3);
            string filename4 = CreateFilename(4);

            var sw1 = new Stopwatch();
            var sw2 = new Stopwatch();
            var sw3 = new Stopwatch();
            var sw4 = new Stopwatch();

            //
            // Set up sample data so that the delta between consecutive index values is 1,
            // and the delta between values is 0.65 => 65 after the multiplier: too big for
            // a 7-bit signed integer, but small enough for a 7-bit unsigned one
            //
            const int itemCount = 500000;
            IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(
                0, itemCount, i => new ItemLngDbl(i, Math.Round((i / 100.0) * 65.0, 2)));


            // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDbl structs.
            // The file is indexed by the long value inside ItemLngDbl marked with the [Index] attribute.
            // Here we provide a custom field factory that will analyze each field as it is being created,
            // and may choose to supply a custom field, or null to use the default.
            // The name is automatically generated, starting with "root" for the TVal, with each
            // subfield appended afterwards, separated by a dot.
            // Alternatively, ItemLngDbl.SequenceNum can be marked with [Field(typeof(IncrementalIndex))].
            // For complex types, the [Field] attribute can also be set on the type itself.
            using (var bf1 = new BinCompressedSeriesFile<long, ItemLngDbl>(
                       filename1,
                       fieldFactory:
                       (store, type, name) =>
                       type == typeof(long) && name == "root.SequenceNum"
                       // For the long field named "SequenceNum", provide a custom IncrementalIndex field serializer
                           ? new IncrementalIndex(store, type, name)
                           : null))
                using (var bf2 = new BinCompressedSeriesFile<long, ItemLngDbl>(filename2))
                    using (var bf3 = new BinCompressedSeriesFile<long, ItemLngDbl>(filename3))
                        using (var bf4 = new BinSeriesFile<long, ItemLngDbl>(filename4))
                        {
                            //
                            // Configure bf1 to be the most compression optimized:
                            //  * use custom incremental field serializer IncrementalIndex
                            //  * use positive-only DeltaType for the value (it always increases in this test)
                            //
                            // When a new instance of BinCompressedSeriesFile is created,
                            // RootField will be pre-populated with default configuration objects.
                            // Some fields, such as doubles, require additional configuration before the file can be initialized.
                            //
                            var root1 = (ComplexField)bf1.RootField;

                            // This double will contain values with no more than 2 digits after the decimal point.
                            // Before serializing, multiply the value by 100 to convert it to a long.
                            // The next value is always the same as or larger than the previous one
                            var val1 = (ScaledDeltaFloatField)root1["Value"].Field;
                            val1.Multiplier   = 100;
                            val1.DeltaType    = DeltaType.Positive;
                            bf1.UniqueIndexes = true; // enforce index uniqueness - each index is +1
                            bf1.InitializeNewFile();  // Finish new file initialization and create an empty file


                            //
                            // Initialize bf2 same as bf1, but without custom serializer
                            //
                            var val2 = (ScaledDeltaFloatField)((ComplexField)bf2.RootField)["Value"].Field;
                            val2.Multiplier   = 100;
                            val2.DeltaType    = DeltaType.Positive;
                            bf2.UniqueIndexes = true;
                            bf2.InitializeNewFile();

                            //
                            // Initialize bf3 in an identical fashion as bf2, but without positive-only delta type.
                            //
                            var val3 = ((ScaledDeltaFloatField)((ComplexField)bf3.RootField)["Value"].Field);
                            val3.Multiplier   = 100;
                            bf3.UniqueIndexes = true;
                            bf3.InitializeNewFile();

                            //
                            // Initialize the fourth, uncompressed file without any parameters.
                            //
                            bf4.UniqueIndexes = true;
                            bf4.InitializeNewFile();

                            //
                            // Append the same data to all files, measuring how long it takes
                            // Please note that the timing is not very accurate here, and will give different results depending on the order
                            //
                            sw4.Start();
                            bf4.AppendData(data);
                            sw4.Stop();

                            sw3.Start();
                            bf3.AppendData(data);
                            sw3.Stop();

                            sw2.Start();
                            bf2.AppendData(data);
                            sw2.Stop();

                            sw1.Start();
                            bf1.AppendData(data);
                            sw1.Stop();

                            //
                            // Verify that the created files are identical (use the default bitwise value type Equals)
                            //
                            if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                            {
                                throw new BinaryFileException("File #1 != #2");
                            }
                            if (!bf1.Stream().SequenceEqual(bf3.Stream()))
                            {
                                throw new BinaryFileException("File #1 != #3");
                            }
                            if (!bf1.Stream().SequenceEqual(bf4.Stream()))
                            {
                                throw new BinaryFileException("File #1 != #4");
                            }

                            //
                            // Print file sizes to see if there was any benefit
                            //
                            Console.WriteLine("Finished creating files with {0:#,#} items:\n", itemCount);
                            Console.WriteLine(
                                "{2,40}: {0,10:#,#} bytes in {1}", bf1.BaseStream.Length, sw1.Elapsed,
                                "DeltaType.Positive and Calculated index");
                            Console.WriteLine(
                                "{2,40}: {0,10:#,#} bytes in {1}", bf2.BaseStream.Length, sw2.Elapsed, "DeltaType.Positive");
                            Console.WriteLine(
                                "{2,40}: {0,10:#,#} bytes in {1}", bf3.BaseStream.Length, sw3.Elapsed, "No optimizations");
                            Console.WriteLine("{2,40}: {0,10:#,#} bytes in {1}", bf4.BaseStream.Length, sw4.Elapsed, "Uncompressed");
                            Console.WriteLine();
                        }

            //
            // Check that the settings are stored correctly in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed <long, ItemLngDbl>)BinaryFile.Open(filename1))
                using (var bf2 = (IWritableFeed <long, ItemLngDbl>)BinaryFile.Open(filename2))
                {
                    if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    {
                        throw new BinaryFileException("File #1 != #2");
                    }
                }

            // cleanup (CreateFilename deletes the file when it already exists)
            CreateFilename(1);
            CreateFilename(2);
            CreateFilename(3);
            CreateFilename(4);
        }
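
A quick check of the 7-bit delta arithmetic the sample data above relies on (plain arithmetic restating the comment, not library code):

            const double step = 0.65;                           // value delta between consecutive items
            long scaled = (long) Math.Round(step * 100);        // 65 after applying Multiplier = 100
            bool fitsSigned7 = scaled >= -64 && scaled <= 63;   // false: 65 overflows the 7-bit signed range
            bool fitsUnsigned7 = scaled >= 0 && scaled <= 127;  // true: DeltaType.Positive packs it into 7 bits
            Console.WriteLine("scaled={0} signed7={1} unsigned7={2}", scaled, fitsSigned7, fitsUnsigned7);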
Code Example #8
        public void Run()
        {
            // Create filenames, deleting existing files if they exist
            string filename1 = CreateFilename(1);
            string filename2 = CreateFilename(2);
            string filename3 = CreateFilename(3);
            string filename4 = CreateFilename(4);

            var sw1 = new Stopwatch();
            var sw2 = new Stopwatch();
            var sw3 = new Stopwatch();
            var sw4 = new Stopwatch();

            //
            // Set up sample data so that the delta between consecutive index values is 1,
            // and the delta between values is 0.65 => 65 after the multiplier: too big for
            // a 7-bit signed integer, but small enough for a 7-bit unsigned one
            //
            const int itemCount = 500000;
            IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(
                0, itemCount, i => new ItemLngDbl(i, Math.Round((i/100.0)*65.0, 2)));

            // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDbl structs.
            // The file is indexed by the long value inside ItemLngDbl marked with the [Index] attribute.
            // Here we provide a custom field factory that will analyze each field as it is being created,
            // and may choose to supply a custom field, or null to use the default.
            // The name is automatically generated, starting with "root" for the TVal, with each
            // subfield appended afterwards, separated by a dot.
            // Alternatively, ItemLngDbl.SequenceNum can be marked with [Field(typeof(IncrementalIndex))].
            // For complex types, the [Field] attribute can also be set on the type itself.
            using (var bf1 = new BinCompressedSeriesFile<long, ItemLngDbl>(
                filename1,
                fieldFactory:
                    (store, type, name) =>
                    type == typeof(long) && name == "root.SequenceNum"
                    // For the long field named "SequenceNum", provide a custom IncrementalIndex field serializer
                        ? new IncrementalIndex(store, type, name)
                        : null))
            using (var bf2 = new BinCompressedSeriesFile<long, ItemLngDbl>(filename2))
            using (var bf3 = new BinCompressedSeriesFile<long, ItemLngDbl>(filename3))
            using (var bf4 = new BinSeriesFile<long, ItemLngDbl>(filename4))
            {
                //
                // Configure bf1 to be the most compression optimized:
                //  * use custom incremental field serializer IncrementalIndex
                //  * use positive-only DeltaType for the value (it always increases in this test)
                //
                // When a new instance of BinCompressedSeriesFile is created,
                // RootField will be pre-populated with default configuration objects.
                // Some fields, such as doubles, require additional configuration before the file can be initialized.
                //
                var root1 = (ComplexField) bf1.RootField;

                // This double will contain values with no more than 2 digits after the decimal point.
                // Before serializing, multiply the value by 100 to convert it to a long.
                // The next value is always the same as or larger than the previous one
                var val1 = (ScaledDeltaFloatField) root1["Value"].Field;
                val1.Multiplier = 100;
                val1.DeltaType = DeltaType.Positive;
                bf1.UniqueIndexes = true; // enforce index uniqueness - each index is +1
                bf1.InitializeNewFile(); // Finish new file initialization and create an empty file

                //
                // Initialize bf2 same as bf1, but without custom serializer
                //
                var val2 = (ScaledDeltaFloatField) ((ComplexField) bf2.RootField)["Value"].Field;
                val2.Multiplier = 100;
                val2.DeltaType = DeltaType.Positive;
                bf2.UniqueIndexes = true;
                bf2.InitializeNewFile();

                //
                // Initialize bf3 in an identical fashion as bf2, but without positive-only delta type.
                //
                var val3 = ((ScaledDeltaFloatField) ((ComplexField) bf3.RootField)["Value"].Field);
                val3.Multiplier = 100;
                bf3.UniqueIndexes = true;
                bf3.InitializeNewFile();

                //
                // Initialize the fourth, uncompressed file without any parameters.
                //
                bf4.UniqueIndexes = true;
                bf4.InitializeNewFile();

                //
                // Append the same data to all files, measuring how long it takes
                // Please note that the timing is not very accurate here, and will give different results depending on the order
                //
                sw4.Start();
                bf4.AppendData(data);
                sw4.Stop();

                sw3.Start();
                bf3.AppendData(data);
                sw3.Stop();

                sw2.Start();
                bf2.AppendData(data);
                sw2.Stop();

                sw1.Start();
                bf1.AppendData(data);
                sw1.Stop();

                //
                // Verify that the created files are identical (use the default bitwise value type Equals)
                //
                if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    throw new BinaryFileException("File #1 != #2");
                if (!bf1.Stream().SequenceEqual(bf3.Stream()))
                    throw new BinaryFileException("File #1 != #3");
                if (!bf1.Stream().SequenceEqual(bf4.Stream()))
                    throw new BinaryFileException("File #1 != #4");

                //
                // Print file sizes to see if there was any benefit
                //
                Console.WriteLine("Finished creating files with {0:#,#} items:\n", itemCount);
                Console.WriteLine(
                    "{2,40}: {0,10:#,#} bytes in {1}", bf1.BaseStream.Length, sw1.Elapsed,
                    "DeltaType.Positive and Calculated index");
                Console.WriteLine(
                    "{2,40}: {0,10:#,#} bytes in {1}", bf2.BaseStream.Length, sw2.Elapsed, "DeltaType.Positive");
                Console.WriteLine(
                    "{2,40}: {0,10:#,#} bytes in {1}", bf3.BaseStream.Length, sw3.Elapsed, "No optimizations");
                Console.WriteLine("{2,40}: {0,10:#,#} bytes in {1}", bf4.BaseStream.Length, sw4.Elapsed, "Uncompressed");
                Console.WriteLine();
            }

            //
            // Check that the settings are stored correctly in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed<long, ItemLngDbl>)BinaryFile.Open(filename1))
            using (var bf2 = (IWritableFeed<long, ItemLngDbl>)BinaryFile.Open(filename2))
            {
                if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    throw new BinaryFileException("File #1 != #2");
            }

            // cleanup (CreateFilename deletes the file when it already exists)
            CreateFilename(1);
            CreateFilename(2);
            CreateFilename(3);
            CreateFilename(4);
        }
Code Example #9
File: Demo_01_Simple.cs Project: valmac/timeseriesdb
        public void Run()
        {
            string filename = GetType().Name + ".bts";

            if (File.Exists(filename))
            {
                File.Delete(filename);
            }

            // Create a new BinSeriesFile that stores a sequence of ItemLngDbl structs.
            // The file is indexed by the long value inside ItemLngDbl marked with the [Index] attribute.
            using (var bf = new BinSeriesFile<long, ItemLngDbl>(filename))
            {
                //
                // Initialize new file parameters and create it
                //
                bf.UniqueIndexes = true;          // enforce index uniqueness
                bf.Tag           = "Sample Data"; // optionally provide a tag to store in the file header
                bf.InitializeNewFile();           // Finish new file initialization and create an empty file


                //
                // Set up data generator to generate 10 items starting with index 3
                //
                IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(3, 10, i => new ItemLngDbl(i, i / 100.0));


                //
                // Append data to the file
                //
                bf.AppendData(data);


                //
                // Read all data and print it using Stream() - one value at a time
                // This method is slower than StreamSegments(), but easier to use for simple one-value iteration
                //
                Console.WriteLine(" ** Content of file {0} after the first append", filename);
                Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
                foreach (ItemLngDbl val in bf.Stream())
                {
                    Console.WriteLine(val);
                }
            }

            // Re-open the file, allowing data modifications
            // The IWritableFeed<,> interface is better, as it will work with compressed files as well
            using (var bf = (IWritableFeed<long, ItemLngDbl>)BinaryFile.Open(filename, true))
            {
                // Append a few more items with a different ItemLngDbl.Value to tell them apart
                IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(10, 10, i => new ItemLngDbl(i, i / 25.0));

                // New data indexes will overlap with existing, so allow truncating old data
                bf.AppendData(data, true);

                // Print values
                Console.WriteLine("\n ** Content of file {0} after the second append", filename);
                Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
                foreach (ItemLngDbl val in bf.Stream())
                {
                    Console.WriteLine(val);
                }
            }

            // Re-open the file for reading only (a file can be opened for reading in parallel, but only one writer)
            // The IEnumerableFeed<,> interface is better, as it will work with compressed files as well
            using (var bf = (IEnumerableFeed<long, ItemLngDbl>)BinaryFile.Open(filename, false))
            {
                // Show first item with index >= 5
                Console.WriteLine(
                    "\nFirst item on or after index 5 is {0}\n",
                    bf.Stream(5, maxItemCount: 1).First());

                // Show last item with index < 7 (iterate backwards)
                Console.WriteLine(
                    "Last item before index 7 is {0}\n",
                    bf.Stream(7, inReverse: true, maxItemCount: 1).First());

                // Average of values for indexes >= 4 and < 8
                Console.WriteLine(
                    "Average of values for indexes >= 4 and < 8 is {0}\n",
                    bf.Stream(4, 8).Average(i => i.Value));

                // Sum of the first 3 values with index less than 18 and going backwards
                Console.WriteLine(
                    "Sum of the first 3 values with index less than 18 and going backwards is {0}\n",
                    bf.Stream(18, maxItemCount: 3, inReverse: true).Sum(i => i.Value));
            }

            // cleanup
            File.Delete(filename);
        }
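
The blocks above stream one value at a time via Stream(); a block-wise sketch using the faster StreamSegments() mentioned in the comments (the same extension seen in Code Example #5, applied to the bf feed of the last using block) could look like this:

            foreach (ArraySegment<ItemLngDbl> seg in bf.StreamSegments())
            {
                // Each segment exposes a chunk of the underlying buffer at Offset..Offset+Count-1
                for (int i = seg.Offset; i < seg.Offset + seg.Count; i++)
                    Console.WriteLine(seg.Array[i]);
            }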
Code Example #10
        public void Run()
        {
            string filename1 = GetType().Name + "1.bts";

            if (File.Exists(filename1))
            {
                File.Delete(filename1);
            }
            string filename2 = GetType().Name + "2.bts";

            if (File.Exists(filename2))
            {
                File.Delete(filename2);
            }
            string filename3 = GetType().Name + "3.bts";

            if (File.Exists(filename3))
            {
                File.Delete(filename3);
            }

            // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDblDbl structs.
            // The file is indexed by the long value inside ItemLngDblDbl marked with the [Index] attribute.
            // For comparison's sake, also create identical but non-state-linked compressed and uncompressed files.
            using (var bf1 = new BinCompressedSeriesFile<long, ItemLngDblDbl>(filename1))
                using (var bf2 = new BinCompressedSeriesFile<long, ItemLngDblDbl>(filename2))
                    using (var bf3 = new BinSeriesFile<long, ItemLngDblDbl>(filename3))
                    {
                        //
                        // Configure value storage. This is the only difference with using BinSeriesFile.
                        //
                        // When a new instance of BinCompressedSeriesFile is created,
                        // RootField will be pre-populated with default configuration objects.
                        // Some fields, such as doubles, require additional configuration before the file can be initialized.
                        //
                        var root = (ComplexField)bf1.RootField;

                        var fld1 = (ScaledDeltaFloatField)root["Value1"].Field;
                        var fld2 = (ScaledDeltaFloatField)root["Value2"].Field;

                        // These doubles will contain values with no more than 2 digits after the decimal point.
                        // Before serializing, multiply each value by 100 to convert it to a long.
                        fld1.Multiplier = 100;
                        fld2.Multiplier = 100;

                        // ** IMPORTANT: Set the second field's state name to match the first field's, linking them together
                        fld2.StateName = fld1.StateName;

                        bf1.InitializeNewFile(); // Finish new file initialization and create an empty file


                        //
                        // Set up data generator to generate items with closely related value1 and value2
                        //
                        IEnumerable<ArraySegment<ItemLngDblDbl>> data =
                            Utils.GenerateData(1, 10000, i => new ItemLngDblDbl(i, i * 10, i * 10 + Math.Round(1 / (1.0 + i % 100), 2)));

                        //
                        // Append data to the file
                        //

                        bf1.AppendData(data);


                        //
                        // Initialize the second file in an identical fashion, but without linking the states, and append the same data
                        //
                        var root2 = (ComplexField)bf2.RootField;
                        ((ScaledDeltaFloatField)root2["Value1"].Field).Multiplier = 100;
                        ((ScaledDeltaFloatField)root2["Value2"].Field).Multiplier = 100;
                        bf2.InitializeNewFile();
                        bf2.AppendData(data);

                        //
                        // Initialize the third uncompressed file and append the same data.
                        //
                        bf3.InitializeNewFile();
                        bf3.AppendData(data);

                        //
                        // Print file sizes to see if there was any benefit
                        //
                        Console.WriteLine("      Shared: {0,10:#,#} bytes", bf1.BaseStream.Length);
                        Console.WriteLine("   NonShared: {0,10:#,#} bytes", bf2.BaseStream.Length);
                        Console.WriteLine("Uncompressed: {0,10:#,#} bytes", bf3.BaseStream.Length);
                        Console.WriteLine();

                        if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                        {
                            throw new BinaryFileException("File #1 != #2");
                        }
                        if (!bf1.Stream().SequenceEqual(bf3.Stream()))
                        {
                            throw new BinaryFileException("File #1 != #3");
                        }
                    }

            //
            // Check that the settings are stored correctly in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed<long, ItemLngDblDbl>)BinaryFile.Open(filename1))
                using (var bf2 = (IWritableFeed<long, ItemLngDblDbl>)BinaryFile.Open(filename2))
                {
                    if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    {
                        throw new BinaryFileException("File #1 != #2");
                    }
                }

            // cleanup
            File.Delete(filename1);
            File.Delete(filename2);
            File.Delete(filename3);
        }
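
Why linking the two field states pays off here, in plain arithmetic (that the shared StateName makes the serializer delta Value2 against Value1 rather than against the previous Value2 is inferred from the comments above, not confirmed from library internals):

            long i = 42;
            double prevValue2 = (i - 1) * 10 + Math.Round(1 / (1.0 + (i - 1) % 100), 2); // 410.02
            double value1 = i * 10;                                                      // 420.00
            double value2 = i * 10 + Math.Round(1 / (1.0 + i % 100), 2);                 // 420.02
            long deltaLinked = (long) Math.Round((value2 - value1) * 100);       // 2: tiny when states are shared
            long deltaUnlinked = (long) Math.Round((value2 - prevValue2) * 100); // 1000: much larger when they are not
            Console.WriteLine("linked={0} unlinked={1}", deltaLinked, deltaUnlinked);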
Code Example #11
        public void Run()
        {
            string filename1 = GetType().Name + "1.bts";
            if (File.Exists(filename1)) File.Delete(filename1);
            string filename2 = GetType().Name + "2.bts";
            if (File.Exists(filename2)) File.Delete(filename2);
            string filename3 = GetType().Name + "3.bts";
            if (File.Exists(filename3)) File.Delete(filename3);

            // Create a new BinCompressedSeriesFile that stores a sequence of ItemLngDblDbl structs.
            // The file is indexed by the long value inside ItemLngDblDbl marked with the [Index] attribute.
            // For comparison's sake, also create identical but non-state-linked compressed and uncompressed files.
            using (var bf1 = new BinCompressedSeriesFile<long, ItemLngDblDbl>(filename1))
            using (var bf2 = new BinCompressedSeriesFile<long, ItemLngDblDbl>(filename2))
            using (var bf3 = new BinSeriesFile<long, ItemLngDblDbl>(filename3))
            {
                //
                // Configure value storage. This is the only difference with using BinSeriesFile.
                //
                // When a new instance of BinCompressedSeriesFile is created,
                // RootField will be pre-populated with default configuration objects.
                // Some fields, such as doubles, require additional configuration before the file can be initialized.
                //
                var root = (ComplexField) bf1.RootField;

                var fld1 = (ScaledDeltaFloatField) root["Value1"].Field;
                var fld2 = (ScaledDeltaFloatField) root["Value2"].Field;

                // These doubles will contain values with no more than 2 digits after the decimal point.
                // Before serializing, multiply each value by 100 to convert it to a long.
                fld1.Multiplier = 100;
                fld2.Multiplier = 100;

                // ** IMPORTANT: Set the second field's state name to match the first field's, linking them together
                fld2.StateName = fld1.StateName;

                bf1.InitializeNewFile(); // Finish new file initialization and create an empty file

                //
                // Set up data generator to generate items with closely related value1 and value2
                //
                IEnumerable<ArraySegment<ItemLngDblDbl>> data =
                    Utils.GenerateData(1, 10000, i => new ItemLngDblDbl(i, i*10, i*10 + Math.Round(1/(1.0 + i%100), 2)));

                //
                // Append data to the file
                //

                bf1.AppendData(data);

                //
                // Initialize the second file in an identical fashion, but without linking the states, and append the same data
                //
                var root2 = (ComplexField) bf2.RootField;
                ((ScaledDeltaFloatField) root2["Value1"].Field).Multiplier = 100;
                ((ScaledDeltaFloatField) root2["Value2"].Field).Multiplier = 100;
                bf2.InitializeNewFile();
                bf2.AppendData(data);

                //
                // Initialize the third uncompressed file and append the same data.
                //
                bf3.InitializeNewFile();
                bf3.AppendData(data);

                //
                // Print file sizes to see if there was any benefit
                //
                Console.WriteLine("      Shared: {0,10:#,#} bytes", bf1.BaseStream.Length);
                Console.WriteLine("   NonShared: {0,10:#,#} bytes", bf2.BaseStream.Length);
                Console.WriteLine("Uncompressed: {0,10:#,#} bytes", bf3.BaseStream.Length);
                Console.WriteLine();

                if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    throw new BinaryFileException("File #1 != #2");
                if (!bf1.Stream().SequenceEqual(bf3.Stream()))
                    throw new BinaryFileException("File #1 != #3");
            }

            //
            // Check that the settings are stored correctly in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed<long, ItemLngDblDbl>)BinaryFile.Open(filename1))
            using (var bf2 = (IWritableFeed<long, ItemLngDblDbl>)BinaryFile.Open(filename2))
            {
                if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    throw new BinaryFileException("File #1 != #2");
            }

            // cleanup
            File.Delete(filename1);
            File.Delete(filename2);
            File.Delete(filename3);
        }
Code Example #12
        public void Run()
        {
            string filename = GetType().Name + ".bts";
            if (File.Exists(filename)) File.Delete(filename);

            // Create a new BinSeriesFile that stores a sequence of ItemLngDbl structs.
            // The file is indexed by the long value inside ItemLngDbl marked with the [Index] attribute.
            using (var bf = new BinSeriesFile<long, ItemLngDbl>(filename))
            {
                //
                // Initialize new file parameters and create it
                //
                bf.UniqueIndexes = true; // enforce index uniqueness
                bf.Tag = "Sample Data"; // optionally provide a tag to store in the file header
                bf.InitializeNewFile(); // Finish new file initialization and create an empty file

                //
                // Set up data generator to generate 10 items starting with index 3
                //
                IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(3, 10, i => new ItemLngDbl(i, i/100.0));

                //
                // Append data to the file
                //
                bf.AppendData(data);

                //
                // Read all data and print it using Stream() - one value at a time
                // This method is slower than StreamSegments(), but easier to use for simple one-value iteration
                //
                Console.WriteLine(" ** Content of file {0} after the first append", filename);
                Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
                foreach (ItemLngDbl val in bf.Stream())
                    Console.WriteLine(val);
            }

            // Re-open the file, allowing data modifications
            // IWritableFeed<,> interface is better as it will work with compressed files as well
            using (var bf = (IWritableFeed<long, ItemLngDbl>) BinaryFile.Open(filename, true))
            {
                // Append a few more items with a different ItemLngDbl.Value to tell them apart
                IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(10, 10, i => new ItemLngDbl(i, i/25.0));

                // New data indexes will overlap with existing, so allow truncating old data
                bf.AppendData(data, true);

                // Print values
                Console.WriteLine("\n ** Content of file {0} after the second append", filename);
                Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
                foreach (ItemLngDbl val in bf.Stream())
                    Console.WriteLine(val);
            }

            // Re-open the file for reading only (a file can be opened for reading in parallel, but only one writer)
            // The IEnumerableFeed<,> interface is better, as it will work with compressed files as well
            using (var bf = (IEnumerableFeed<long, ItemLngDbl>)BinaryFile.Open(filename, false))
            {
                // Show first item with index >= 5
                Console.WriteLine(
                    "\nFirst item on or after index 5 is {0}\n",
                    bf.Stream(5, maxItemCount: 1).First());

                // Show last item with index < 7 (iterate backwards)
                Console.WriteLine(
                    "Last item before index 7 is {0}\n",
                    bf.Stream(7, inReverse: true, maxItemCount: 1).First());

                // Average of values for indexes >= 4 and < 8
                Console.WriteLine(
                    "Average of values for indexes >= 4 and < 8 is {0}\n",
                    bf.Stream(4, 8).Average(i => i.Value));

                // Sum of the first 3 values with index less than 18 and going backwards
                Console.WriteLine(
                    "Sum of the first 3 values with index less than 18 and going backwards is {0}\n",
                    bf.Stream(18, maxItemCount: 3, inReverse: true).Sum(i => i.Value));
            }

            // cleanup
            File.Delete(filename);
        }