Example #1
0
        public void Test()
        {
            // Build a fixed set of sample blocks to round-trip through the file.
            const int blockSize  = 100;
            const int blockCount = 1000;

            var data = new TradesBlock[blockCount];
            for (int i = 0; i < blockCount; i++)
            {
                data[i] = new TradesBlock(i, blockSize);
            }

            string fileName = GetBinFileName();

            if (AllowCreate)
            {
                // Create a fresh indexed file, write every block at position 0,
                // and verify the content before the file is closed.
                using (var f = new BinIndexedFile<TradesBlock>(fileName))
                {
                    ((IBinBlockSerializer)f.Serializer).ItemCount = blockSize;
                    f.InitializeNewFile();
                    f.WriteData(0, new ArraySegment<TradesBlock>(data));

                    VerifyData(f, data);
                }
            }

            // Re-open read-only through the generic factory and verify the data survived.
            using (var bf = (BinIndexedFile<TradesBlock>)BinaryFile.Open(fileName, false, LegacyResolver))
            {
                VerifyData(bf, data);
            }
        }
        public void Run()
        {
            // Demo file names derived from the class name; remove stale copies first.
            string sourceFile = GetType().Name + "1.bts";
            string targetFile = GetType().Name + "2.bts";

            if (File.Exists(sourceFile))
            {
                File.Delete(sourceFile);
            }

            if (File.Exists(targetFile))
            {
                File.Delete(targetFile);
            }

            try
            {
                // Create sample file and put some data into it.
                CreateSampleFile(sourceFile);

                // Open the sample file generically, without naming the item/index types.
                using (var file = BinaryFile.Open(sourceFile))
                {
                    var invoker = file as IGenericInvoker2;
                    if (invoker == null)
                    {
                        // This could be a BinIndexedFile or some legacy file that we no longer support.
                        // Even though BinaryFile supports RunGenericMethod() with one generic argument,
                        // this demo only shows the two-argument form that IEnumerableFeed has.
                        Console.WriteLine("File {0} does not support reading through IEnumerableFeed<,>", sourceFile);
                        return;
                    }

                    // Print content of the source file.
                    Console.WriteLine("Source file\n{0}", Utils.DumpFeed(invoker));

                    // GenericCopier implements IGenericCallable2<,>;
                    // alternatively, "this" class could implement it.
                    var copier = new GenericCopier();

                    // Run the generic method on the callable, passing the target path as its parameter.
                    long itemsCopied = invoker.RunGenericMethod(copier, targetFile);

                    // Done
                    Console.WriteLine("{0} items was copied from {1} to {2}", itemsCopied, sourceFile, targetFile);
                }
            }
            finally // Cleanup
            {
                if (File.Exists(sourceFile))
                {
                    File.Delete(sourceFile);
                }

                if (File.Exists(targetFile))
                {
                    File.Delete(targetFile);
                }
            }
        }
Example #3
0
        // Writes 10000 generated bytes to an indexed file, then re-reads the file
        // through a non-seekable, read-only stream wrapper and checks that two
        // consecutive ReadData(0, ...) calls return consecutive halves of the data
        // (with seeking disabled the reads are sequential, so the second call
        // continues from where the first one stopped).
        public void StreamingTest()
        {
            string fileName = GetBinFileName();

            byte[] data = TestUtils.GenerateData<byte>(10000, 0);
            if (AllowCreate)
            {
                using (var b = new BinIndexedFile<byte>(fileName))
                {
                    b.InitializeNewFile();
                    b.WriteData(0, new ArraySegment<byte>(data));
                }
            }

            byte[] bytes = File.ReadAllBytes(fileName);

            using (var b = (BinIndexedFile<byte>)BinaryFile.Open(fileName, false, LegacyResolver))
            // FIX: the MemoryStream, the ConfigurableStream wrapper, and the file
            // opened over that stream were never disposed, leaking the underlying
            // resources. All three are now wrapped in using blocks.
            using (var ms = new MemoryStream(bytes))
            using (var cs = new ConfigurableStream(ms))
            {
                var data2 = new byte[data.Length / 2];

                // Simulate a forward-only, read-only data source.
                cs.AllowSeek = cs.AllowWrite = false;

                using (var b2 = (BinIndexedFile<byte>)BinaryFile.Open(cs, LegacyResolver))
                {
                    Assert.IsTrue(b2.IsOpen);
                    Assert.AreEqual(b.ItemSize, b2.ItemSize);

                    // First sequential read: the first half of the data.
                    b2.ReadData(0, new ArraySegment<byte>(data2));
                    TestUtils.CollectionAssertEqual(TestUtils.GenerateData<byte>(data.Length / 2, 0), data2);

                    // Second read at "position 0": with seeking disabled this
                    // continues sequentially, returning the second half.
                    b2.ReadData(0, new ArraySegment<byte>(data2));
                    TestUtils.CollectionAssertEqual(TestUtils.GenerateData<byte>(data.Length / 2, data.Length / 2), data2);
                }
            }
        }
 // Returns a feed for the current test file: a brand-new file built by the
 // supplied factory when creation is allowed, otherwise the existing file
 // re-opened read-only through the legacy-aware loader.
 private IWritableFeed <TInd, T> Open <TInd, T>(Func <string, IWritableFeed <TInd, T> > newFile)
     where TInd : IComparable <TInd>
 {
     string fileName = GetBinFileName();

     if (AllowCreate)
     {
         return newFile(fileName);
     }

     return (IWritableFeed <TInd, T>)BinaryFile.Open(fileName, false, LegacyResolver);
 }
Example #5
0
        // Opens the compressed time-series database at the given path and resets
        // the streamer's cursor state to "no data read yet".
        // NOTE(review): the opened file is stored in TimeSeriesDB but never
        // visibly disposed in this block -- confirm the owning type disposes it.
        public TSStreamer(string filename)
        {
            this.Filename     = filename;
            // Second argument is presumably canWrite=false (read-only open) -- verify
            // against the BinaryFile.Open overload documentation.
            this.TimeSeriesDB = (BinCompressedSeriesFile <ulong, TSRecord>)BinaryFile.Open(this.Filename, false);
//			this.TimeSeriesDB = (IEnumerableFeed<ulong, TSRecord>)BinaryFile.Open(this.Filename, false);
            this.LastDt   = DateTime.MinValue;
            this.EndDt    = DateTime.MinValue;
            this.MoreData = false;
        }
Example #6
0
        // Demonstrates storing Item structs in a BinCompressedSeriesFile using
        // constructor-based field population, then verifies the written data both
        // before closing and after re-opening the file.
        public void Run()
        {
            // Create filenames, deleting existing files if exist
            string filename = CreateFilename();

            //
            // Set up sample data.
            // Note that we might get minor rounding errors when storing.
            // The Equals implementation of the Item accounts for that.
            //
            const int itemCount = 10000;
            IEnumerable <ArraySegment <Item> > data = Utils.GenerateData(
                0, itemCount, i => new Item(i));


            // Create new BinCompressedSeriesFile file that stores a sequence of Item structs
            // The file is indexed by a long value inside Item marked with the [Index] attribute.
            // See the Item struct declaration
            using (var bf = new BinCompressedSeriesFile <long, Item>(filename))
            {
                // Automatically pick the constructor that would set all the public fields in the struct
                var cmpxFld = ((ComplexField)bf.RootField);
                cmpxFld.PopulateFields(ComplexField.Mode.Constructor | ComplexField.Mode.Fields);

                Console.WriteLine("Serialized Fields:\n  {0}\n", string.Join(Environment.NewLine + "  ", cmpxFld.Fields));
                // FIX: corrected the "constrtuctor" typo in the user-visible message.
                Console.WriteLine("Deserialized with constructor:\n  {0}\n", cmpxFld.Constructor);


                bf.InitializeNewFile(); // Finish new file initialization and create an empty file

                bf.AppendData(data);

                //
                // Verify that the created files are identical (use the default bitwise value type Equals)
                //
                if (!bf.Stream().SequenceEqual(data.Stream()))
                {
                    throw new BinaryFileException("File does not have the right data");
                }

                Console.WriteLine("File {0} created with {1,10:#,#} bytes", filename, bf.BaseStream.Length);
            }

            //
            // Check that the settings are stored ok in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed <long, Item>)BinaryFile.Open(filename))
            {
                if (!bf1.Stream().SequenceEqual(data.Stream()))
                {
                    throw new BinaryFileException("File does not have the right data on the second check");
                }
            }

            // cleanup (CreateFilename deletes the existing file as a side effect)
            CreateFilename();
        }
Example #7
0
        // Round-trips generated test data through a BinTimeseriesFile and checks
        // full-range reads, forward/backward streaming, and ranged reads over the
        // tail of the data set.
        //   itemCount        - number of generated items to write/verify
        //   repeatRuns       - upper bound on the number of ranged-read iterations
        //   uniqueTimestamps - passed through to the file's UniqueTimestamps setting
        //   enableCache      - maps to BinarySearchCacheSize 0 (enabled/default size)
        //                      or -1 (disabled) -- NOTE(review): assumed from the
        //                      0/-1 convention; confirm against the API docs.
        private void RunTest(int itemCount, int repeatRuns, bool uniqueTimestamps, bool enableCache)
        {
            string fileName = GetBinFileName();

            // Create a new file when allowed, otherwise re-open an existing one read-only.
            using (BinTimeseriesFile <_DatetimeByte_SeqPk1> f =
                       AllowCreate
                    ? new BinTimeseriesFile <_DatetimeByte_SeqPk1>(fileName)
            {
                UniqueTimestamps = uniqueTimestamps
            }
                    : (BinTimeseriesFile <_DatetimeByte_SeqPk1>)
                   BinaryFile.Open(fileName, false, LegacyResolver))
            {
                f.BinarySearchCacheSize = enableCache ? 0 : -1;

                _DatetimeByte_SeqPk1[] newData =
                    TestUtils.GenerateData <_DatetimeByte_SeqPk1>(itemCount, 0);

                if (AllowCreate)
                {
                    f.InitializeNewFile();
                    f.AppendData(new ArraySegment <_DatetimeByte_SeqPk1>(newData));
                }

                // The full-range read must return everything that was written.
                _DatetimeByte_SeqPk1[] res = f.ReadData(UtcDateTime.MinValue, UtcDateTime.MaxValue, int.MaxValue);
                TestUtils.AreEqual(newData, res);

                TestUtils.CollectionAssertEqual(newData, f.Stream(UtcDateTime.MinValue));

                // Reverse streaming must yield the same items in reverse order,
                // whether started from MaxValue or from the default index.
                Array.Reverse(newData);
                TestUtils.CollectionAssertEqual(newData, f.Stream(UtcDateTime.MaxValue, inReverse: true));
                TestUtils.CollectionAssertEqual(newData, f.Stream(default(UtcDateTime), inReverse: true));

                if (itemCount > 0)
                {
                    // Spot-check ranged reads covering the last i items.
                    for (int i = 0; i < Math.Min(repeatRuns, itemCount); i++)
                    {
                        UtcDateTime fromInd  = _DatetimeByte_SeqPk1.New(itemCount - i).a;
                        UtcDateTime untilInd = _DatetimeByte_SeqPk1.New(itemCount).a;

                        res = f.ReadData(fromInd, untilInd, int.MaxValue);

                        // Expected: i items starting at index itemCount-i
                        // (untilInd points one past the last generated item).
                        _DatetimeByte_SeqPk1[] expected = TestUtils.GenerateData <_DatetimeByte_SeqPk1>(i, itemCount - i);
                        TestUtils.AreEqual(expected, res);

                        List <_DatetimeByte_SeqPk1> res1 = f.Stream(fromInd, untilInd.AddTicks(1)).ToList();
                        TestUtils.CollectionAssertEqual(expected, res1);

                        // The same range streamed backwards must match the reversed slice.
                        Array.Reverse(expected);

                        List <_DatetimeByte_SeqPk1> res2 = f.Stream(untilInd, fromInd, inReverse: true).ToList();
                        TestUtils.CollectionAssertEqual(expected, res2);
                    }
                }
            }
        }
        // Exercises reads/writes whose item counts land exactly on, one before,
        // and one past the first memory-page boundary of the file, with or
        // without memory-mapped access.
        //   enableMma - enables memory-mapped access for both reads and writes
        //   pageSize  - page size in bytes used to compute the boundary
        private void PageBorderOperations <T>(bool enableMma, int pageSize)
            where T : IEquatable <T>
        {
            DeleteTempFiles();

            string fileName = GetBinFileName();

            using (
                BinIndexedFile <T> f = AllowCreate
                                          ? new BinIndexedFile <T>(fileName)
                                          : (BinIndexedFile <T>)
                                       BinaryFile.Open(fileName, false, LegacyResolver))
            {
                if (AllowCreate)
                {
                    f.InitializeNewFile();
                    f.EnableMemMappedAccessOnRead  = enableMma;
                    f.EnableMemMappedAccessOnWrite = enableMma;
                }

                // items1StPg = number of items that fit on the first page after
                // the header; if the header exactly fills whole pages, a full
                // page worth of items fits.
                int itemsPerPage          = pageSize / Marshal.SizeOf(typeof(T));
                int headerSizeAsItemCount = f.HeaderSize / f.ItemSize;
                int items1StPg            = (int)TestUtils.RoundUpToMultiple(headerSizeAsItemCount, itemsPerPage) -
                                            headerSizeAsItemCount;

                if (items1StPg == 0)
                {
                    items1StPg = itemsPerPage;
                }

                // Data sets sized one below, exactly at, and one above the boundary.
                T[] dataMinusOne = TestUtils.GenerateData <T>(items1StPg - 1, 0);
                T[] dataZero     = TestUtils.GenerateData <T>(items1StPg, 0);
                T[] dataPlusOne  = TestUtils.GenerateData <T>(items1StPg + 1, 0);

                if (AllowCreate)
                {
                    // After each write, the physical file length must equal
                    // header + written items exactly.
                    f.WriteData(0, new ArraySegment <T>(dataMinusOne));
                    Assert.AreEqual(f.HeaderSize + (items1StPg - 1) * f.ItemSize, new FileInfo(fileName).Length);
                    ReadAndAssert(dataMinusOne, f, 0, dataMinusOne.Length);

                    f.WriteData(0, new ArraySegment <T>(dataZero));
                    Assert.AreEqual(f.HeaderSize + items1StPg * f.ItemSize, new FileInfo(fileName).Length);
                    ReadAndAssert(dataZero, f, 0, dataZero.Length);

                    f.WriteData(0, new ArraySegment <T>(dataPlusOne));
                }

                Assert.AreEqual(f.HeaderSize + (items1StPg + 1) * f.ItemSize, new FileInfo(fileName).Length);
                ReadAndAssert(dataPlusOne, f, 0, dataPlusOne.Length);

                // Single- and two-item reads straddling the page boundary.
                ReadAndAssert(TestUtils.GenerateData <T>(1, items1StPg - 1), f, items1StPg - 1, 1);
                ReadAndAssert(TestUtils.GenerateData <T>(1, items1StPg), f, items1StPg, 1);
                ReadAndAssert(TestUtils.GenerateData <T>(2, items1StPg - 1), f, items1StPg - 1, 2);
            }
        }
Example #9
0
        // Opens the test file: read-only via the legacy resolver when creation is
        // disallowed, otherwise a freshly initialized file permitting duplicate indexes.
        private BinSeriesFile <long, _LongByte_SeqPk1> OpenFile(string fileName)
        {
            if (!AllowCreate)
            {
                return (BinSeriesFile <long, _LongByte_SeqPk1>)BinaryFile.Open(fileName, false, LegacyResolver);
            }

            var file = new BinSeriesFile <long, _LongByte_SeqPk1>(fileName) { UniqueIndexes = false };
            file.InitializeNewFile();
            return file;
        }
Example #10
0
        // Best-effort lookup of the record at the given index; returns null when
        // the file cannot be read or the index yields no record.
        private TSRecord GetQuote(ulong index)
        {
            TSRecord tsr = null;

            try
            {
                // FIX: the opened file was never disposed, leaking the underlying
                // file handle on every call; 'using' guarantees cleanup.
                using (BinaryFile file = BinaryFile.Open(this.Filename, false))
                {
                    var bf = (IEnumerableFeed <ulong, TSRecord>)file;

                    // Stream the single-item range [index, index + 1) and keep
                    // the last value seen.
                    foreach (TSRecord val in bf.Stream(index, index + 1))
                    {
                        tsr = val;
                    }
                }
            }
            catch (Exception)
            {
                // Deliberate best-effort: any failure results in a null return.
                // NOTE(review): consider logging the exception instead of
                // discarding it silently.
            }

            return tsr;
        }
        // Verifies that a custom type resolver can remap the stored item type:
        // data written as _DatetimeByte_SeqPk1 is re-opened as _LongByte_SeqPk1,
        // and the field values must carry over (DateTime index -> raw ticks).
        public void MappingTest()
        {
            string fileName = GetBinFileName();

            _DatetimeByte_SeqPk1[] data = TestUtils.GenerateData <_DatetimeByte_SeqPk1>(1, 10);
            if (AllowCreate)
            {
                using (var f = new BinIndexedFile <_DatetimeByte_SeqPk1>(fileName))
                {
                    f.InitializeNewFile();
                    f.WriteData(0, new ArraySegment <_DatetimeByte_SeqPk1>(data));
                }
            }

            // Identity of the originally stored type, used to match it during resolution.
            Type   oldT  = typeof(_DatetimeByte_SeqPk1);
            string oldAn = oldT.Assembly.GetName().Name;

            // Open with a resolver chain whose first stage substitutes
            // _LongByte_SeqPk1 whenever the stored type name/assembly matches the
            // original type; all other names fall through to the default resolvers.
            using (BinaryFile f = BinaryFile.Open(
                       fileName, false,
                       tn =>
                       TypeUtils.ParseAndResolve(
                           tn,
                           ts2 => TypeSpec.DefaultFullTypeResolver(
                               ts2,
                               (ts, an) =>
                               an != null && an.Name == oldAn && ts.Name == oldT.FullName ? typeof(_LongByte_SeqPk1) : null,
                               TypeResolver, LegacySupport.TypeResolver, TypeUtils.ResolverFromAnyAssemblyVersion))))
            {
                var p = (BinIndexedFile <_LongByte_SeqPk1>)f;

                var data2 = new _LongByte_SeqPk1[1];
                p.ReadData(0, new ArraySegment <_LongByte_SeqPk1>(data2));

                // The remapped type must see the same bytes: DateTime stored as ticks.
                Assert.AreEqual(data[0].a.Ticks, data2[0].a);
                Assert.AreEqual(data[0].b, data2[0].b);
            }
        }
Example #12
0
        // Appends generated items to a series file (duplicates allowed) and
        // verifies that a full-range read returns every item that was written.
        public void BasicFunctionality()
        {
            _DatetimeByte_SeqPk1[] newData = TestUtils.GenerateData <_DatetimeByte_SeqPk1>(10000, 0);

            string fileName = GetBinFileName();

            // Either create a fresh series file or re-open one produced by an
            // earlier run in read-only mode.
            BinSeriesFile <UtcDateTime, _DatetimeByte_SeqPk1> f;
            if (AllowCreate)
            {
                f = new BinSeriesFile <UtcDateTime, _DatetimeByte_SeqPk1>(fileName)
                {
                    UniqueIndexes = false
                };
            }
            else
            {
                f = (BinSeriesFile <UtcDateTime, _DatetimeByte_SeqPk1>)
                    BinaryFile.Open(fileName, false, LegacyResolver);
            }

            using (f)
            {
                if (AllowCreate)
                {
                    f.InitializeNewFile();
                    f.AppendData(new ArraySegment <_DatetimeByte_SeqPk1>(newData));
                }

                // Full-range read must return everything that was appended.
                _DatetimeByte_SeqPk1[] res = f.ReadData(UtcDateTime.MinValue, UtcDateTime.MaxValue, int.MaxValue);
                TestUtils.AreEqual(newData, res);
            }
        }
        // Creates an empty file of item type T (when allowed), then re-opens it
        // read-only and checks that version info, header size, item size, count,
        // and emptiness all survive the round trip.
        //   expectedItemSize - the serialized size each T item must occupy
        private void EmptyFile <T>(int expectedItemSize)
        {
            const string testName = "EmptyFile";

            try
            {
                Stopwatch sw = TestStart();

                int     hdrSize;
                Version fileVersion, baseVersion, serializerVersion;

                string fileName = GetBinFileName();

                if (AllowCreate)
                {
                    // Create a brand-new empty file and capture its metadata for
                    // comparison after re-opening.
                    using (var f = new BinIndexedFile <T>(fileName))
                    {
                        f.InitializeNewFile();
                        fileVersion = f.Version;
                        Assert.IsNotNull(fileVersion);
                        baseVersion = f.BaseVersion;
                        Assert.IsNotNull(baseVersion);
                        serializerVersion = f.Serializer.Version;
                        Assert.IsNotNull(serializerVersion);

                        Assert.AreEqual(true, f.CanWrite);
                        Assert.AreEqual(0, f.Count);

                        hdrSize = f.HeaderSize;

                        Assert.AreEqual(expectedItemSize, f.ItemSize);
                        // Header must be an exact multiple of the item size.
                        Assert.IsTrue(hdrSize % f.ItemSize == 0);
                        Assert.IsTrue(f.IsEmpty);
                    }
                }
                else
                {
                    // No baseline metadata available; the metadata assertions
                    // below are skipped in this mode.
                    fileVersion = baseVersion = serializerVersion = default(Version);
                    hdrSize     = 0;
                }

                // Re-open read-only and verify the persisted state.
                using (BinaryFile file = BinaryFile.Open(fileName, false, LegacyResolver))
                {
                    Assert.IsInstanceOf <BinIndexedFile <T> >(file);
                    Assert.AreEqual(typeof(T), file.ItemType);
                    var f = (BinIndexedFile <T>)file;

                    if (AllowCreate)
                    {
                        Assert.AreEqual(fileVersion, f.Version);
                        Assert.AreEqual(baseVersion, f.BaseVersion);
                        Assert.AreEqual(serializerVersion, f.Serializer.Version);
                        Assert.AreEqual(hdrSize, f.HeaderSize);
                    }

                    Assert.AreEqual(false, f.CanWrite);
                    Assert.AreEqual(0, f.Count);

                    Assert.IsTrue(f.IsEmpty);
                    Assert.AreEqual(expectedItemSize, f.ItemSize);
                }

                TestStop <T>(testName, sw);
            }
            catch
            {
                // Identify the failing test by name before propagating.
                Console.WriteLine("Error in " + testName);
                throw;
            }
        }
        // Generic driver for feed tests: creates (or re-opens) a feed via the
        // supplied callbacks, appends generated data, and exhaustively checks
        // forward and reverse streaming over many skip/take combinations.
        //   name      - label included in assertion messages
        //   itemCount - number of items to generate and append
        //   newFile   - factory creating a new feed for a file name
        //   update    - optional configuration hook applied before init
        //   init      - initialization hook (e.g. InitializeNewFile) in create mode
        private void Run(
            string name, int itemCount,
            Func <string, IWritableFeed <UtcDateTime, _DatetimeByte_SeqPk1> > newFile,
            Action <IWritableFeed <UtcDateTime, _DatetimeByte_SeqPk1> > update,
            Action <IWritableFeed <UtcDateTime, _DatetimeByte_SeqPk1> > init)
        {
            string fileName = GetBinFileName();

            IEnumerable <ArraySegment <_DatetimeByte_SeqPk1> > newData = Data(itemCount, 0, itemCount);
            List <_DatetimeByte_SeqPk1> expected = newData.Stream().ToList();

            Assert.AreEqual(itemCount, expected.Count);
            _DatetimeByte_SeqPk1[] expectedRev = expected.ToArray();
            Array.Reverse(expectedRev);
            // Create mode uses the factory; otherwise re-open read-only.
            IWritableFeed <UtcDateTime, _DatetimeByte_SeqPk1>
            f = !AllowCreate
                        ? (IWritableFeed <UtcDateTime, _DatetimeByte_SeqPk1>)
                BinaryFile.Open(fileName, false, LegacyResolver)
                        : newFile(fileName);

            try
            {
                if (update != null)
                {
                    update(f);
                }

                if (AllowCreate)
                {
                    // Write, then close and re-open so all checks below run
                    // against a freshly opened file.
                    init(f);
                    f.AppendData(newData);
                    f.Dispose();
                    f =
                        (IWritableFeed <UtcDateTime, _DatetimeByte_SeqPk1>)
                        BinaryFile.Open(fileName, false, LegacyResolver);
                }

                // Nothing exists before the first index or after the last.
                TestUtils.CollectionAssertEqual(
                    _empty, f.Stream(UtcDateTime.MinValue, inReverse: true),
                    "nothing before 0 {0}", name);

                TestUtils.CollectionAssertEqual(
                    _empty, f.Stream(UtcDateTime.MaxValue),
                    "nothing after max {0}", name);

                if (itemCount <= 0)
                {
                    // Empty-feed checks: default indexes and empty streams both ways.
                    Assert.IsTrue(f.IsEmpty, "IsEmpty {0}", name);
                    Assert.AreEqual(default(UtcDateTime), f.FirstIndex, "default FirstInd {0}", name);
                    Assert.AreEqual(default(UtcDateTime), f.LastIndex, "default LastInd {0}", name);
                    TestUtils.CollectionAssertEqual(_empty, f.Stream(UtcDateTime.MinValue), "empty forward {0}", name);
                    TestUtils.CollectionAssertEqual(
                        _empty, f.Stream(UtcDateTime.MinValue, inReverse: true), "empty backward {0}", name);
                    return;
                }

                Assert.IsFalse(f.IsEmpty, "!IsEmpty {0}", name);

                Assert.AreEqual(expected[0].a, f.FirstIndex, name + " first");
                Assert.AreEqual(expected[itemCount - 1].a, f.LastIndex, "last {0}", name);

                TestUtils.CollectionAssertEqual(expected, f.Stream(UtcDateTime.MinValue), "full forward {0}", name);
                TestUtils.CollectionAssertEqual(
                    expectedRev, f.Stream(UtcDateTime.MaxValue, inReverse: true), "full backward {0}", name);

                const int skipStart = 0;
                const int takeStart = 0;

                const int maxSkipCount = 50;

                // Sweep skip/take windows and compare streamed slices to LINQ slices.
                int maxSkip = Math.Min(maxSkipCount, itemCount);
                for (int skip = skipStart; skip < maxSkip; skip++)
                {
                    int maxTake = Math.Min(maxSkipCount, itemCount - maxSkip + 1);
                    for (int take = takeStart; take < maxTake; take++)
                    {
                        TestUtils.CollectionAssertEqual(
                            expected.Skip(skip).Take(take), f.Stream(expected[skip].a, maxItemCount: take),
                            "skip {1} take {2} {0}", name, skip, take);

                        // NOTE(review): 'take' is always < maxTake <= itemCount here,
                        // so this branch appears unreachable -- the condition may be
                        // inverted (perhaps 'take < itemCount' was intended). Verify.
                        if (itemCount < take)
                        {
                            TestUtils.CollectionAssertEqual(
                                expected.Skip(skip).Take(take - 1),
                                f.Stream(expected[skip].a.AddSeconds(1), maxItemCount: take - 1),
                                "next tick skip {1} take ({2}-1) {0}", name, skip, take);
                        }

                        // NOTE(review): likewise 'skip' is always < maxSkip <= itemCount,
                        // so this branch also appears unreachable; the Skip(skip - 1) vs
                        // expectedRev[skip] pairing inside looks suspicious too. Verify.
                        if (itemCount < skip)
                        {
                            TestUtils.CollectionAssertEqual(
                                expectedRev.Skip(skip - 1).Take(take),
                                f.Stream(expectedRev[skip].a, maxItemCount: take, inReverse: true),
                                "backward, existing item, skip {1} take {2} {0}", name, skip, take);

                            TestUtils.CollectionAssertEqual(
                                expectedRev.Skip(skip - 1).Take(take),
                                f.Stream(expectedRev[skip].a.AddSeconds(-1), maxItemCount: take, inReverse: true),
                                "backward, non-existing, skip {1} take {2} {0}", name, skip, take);
                        }
                    }
                }
            }
            finally
            {
                f.Dispose();
            }
        }
        // Verifies incremental writes to an indexed file: overwrite from the
        // start, append at the end, and overwrite the tail, checking Count and
        // content after each operation.
        private void FileIncrementalAddition <T>() where T : IEquatable <T>
        {
            const string testName = "FileIncrementalAddition";

            try
            {
                Stopwatch sw = TestStart();

                // Three data sets of increasing size (1, 2, and 3 items).
                T[] data0 = TestUtils.GenerateData <T>(1, 10);
                T[] data1 = TestUtils.GenerateData <T>(2, 20);
                T[] data2 = TestUtils.GenerateData <T>(3, 30);

                string fileName = GetBinFileName();

                if (AllowCreate)
                {
                    // Seed the file with the single-item data set.
                    using (var f = new BinIndexedFile <T>(fileName))
                    {
                        f.InitializeNewFile();
                        f.WriteData(0, new ArraySegment <T>(data0));

                        Assert.AreEqual(true, f.CanWrite);
                        Assert.AreEqual(1, f.Count);
                        Assert.IsFalse(f.IsEmpty);

                        ReadAndAssert(data0, f, 0, f.Count);
                    }
                }

                // Re-open: writable in create mode, read-only otherwise.
                using (BinaryFile file = BinaryFile.Open(fileName, AllowCreate, LegacyResolver))
                {
                    Assert.IsInstanceOf <BinIndexedFile <T> >(file);
                    var f = (BinIndexedFile <T>)file;

                    Assert.AreEqual(AllowCreate, f.CanWrite);
                    if (AllowCreate)
                    {
                        Assert.AreEqual(1, f.Count);
                        ReadAndAssert(data0, f, 0, f.Count);

                        // Replace with buff2 starting at 0
                        WriteData(f, 0, data1);
                        Assert.AreEqual(2, f.Count);
                        ReadAndAssert(data1, f, 0, f.Count);

                        // Append buff1
                        WriteData(f, f.Count, data0);
                        Assert.AreEqual(3, f.Count);
                        ReadAndAssert(data0, f, 2, 1);

                        // Write buff3 instead of buff1
                        WriteData(f, data1.Length, data2);
                    }

                    // Final layout: data1 (2 items) followed by data2 (3 items).
                    Assert.AreEqual(data1.Length + data2.Length, f.Count);
                    ReadAndAssert(TestUtils.Concatenate(data1, data2), f, 0, f.Count);
                }

                TestStop <T>(testName, sw);
            }
            catch
            {
                // Identify the failing test by name before propagating.
                Console.WriteLine("Error in " + testName);
                throw;
            }
        }
        // Demonstrates per-field compression configuration on a
        // BinCompressedSeriesFile (positive-delta index, scaled float value),
        // then verifies the written data both before closing and after re-opening.
        public void Run()
        {
            // Create filenames, deleting existing files if exist
            string filename = CreateFilename();

            //
            // Set up sample data.
            // Note that we might get minor rounding errors when storing.
            // The Equals implementation of the ReadonlyItemLngDbl accounts for that.
            //
            const int itemCount = 10000;
            IEnumerable <ArraySegment <ReadonlyItemLngDbl> > data = Utils.GenerateData(
                0, itemCount, i => new ReadonlyItemLngDbl(i, (i / 100.0) * 65.0));


            // Create new BinCompressedSeriesFile file that stores a sequence of ReadonlyItemLngDbl structs
            // The file is indexed by a long value inside ReadonlyItemLngDbl marked with the [Index] attribute.
            // Here we provide a custom field factory that will analyze each field as it is being created,
            // and may choose to supply a custom field or null to use the default.
            // The name is the automatically generated, starting with the "root" for the TVal with each
            // subfield appended afterwards, separated by a dot.
            // Alternatively, ReadonlyItemLngDbl.SequenceNum can be marked with [Field(typeof(IncrementalIndex))]
            // For complex types, [Field] attribute can also be set on the type itself.
            using (var bf = new BinCompressedSeriesFile <long, ReadonlyItemLngDbl>(filename))
            {
                // When a new instance of BinCompressedSeriesFile is created,
                // RootField will be pre-populated with default configuration objects.
                // Some fields, such as doubles, require additional configuration before the file can be initialized.
                var root = (ReadonlyItemLngDblField)bf.RootField;

                // Index is always increasing
                var seq = (ScaledDeltaIntField)root.SequenceNumField;
                seq.DeltaType = DeltaType.Positive;

                // This double will contain values with no more than 2 digits after the decimal points.
                // Before serializing, multiply the value by 100 to convert to long.
                var val1 = (ScaledDeltaFloatField)root.ValueField;
                val1.Multiplier = 100;

                bf.UniqueIndexes = true; // enforce index uniqueness - each index is +1
                bf.InitializeNewFile();  // Finish new file initialization and create an empty file


                bf.AppendData(data);

                //
                // Verify that the created files are identical (use the default bitwise value type Equals)
                //
                if (!bf.Stream().SequenceEqual(data.Stream()))
                {
                    throw new BinaryFileException("File does not have the right data");
                }

                Console.WriteLine("File {0} created with {1,10:#,#} bytes", filename, bf.BaseStream.Length);
            }

            //
            // Check that the settings are stored ok in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed <long, ReadonlyItemLngDbl>)BinaryFile.Open(filename))
            {
                if (!bf1.Stream().SequenceEqual(data.Stream()))
                {
                    throw new BinaryFileException("File does not have the right data on the second check");
                }
            }

            // cleanup -- NOTE(review): relies on CreateFilename deleting an
            // existing file as a side effect; confirm this is intended.
            CreateFilename();
        }
Example #17
0
        /// <summary>
        /// Demonstrates linking the delta-compression state of two closely correlated
        /// double fields, and compares resulting file sizes against an identically
        /// configured non-linked compressed file and an uncompressed file.
        /// </summary>
        public void Run()
        {
            // Prepare three output file names, deleting leftovers from any previous run.
            var files = new string[3];
            for (int n = 0; n < files.Length; n++)
            {
                files[n] = GetType().Name + (n + 1) + ".bts";
                if (File.Exists(files[n]))
                {
                    File.Delete(files[n]);
                }
            }

            // Create new BinCompressedSeriesFile files that store a sequence of ItemLngDblDbl structs,
            // indexed by the long value inside ItemLngDblDbl marked with the [Index] attribute.
            // For comparison sake, also create identical but non-state-linked compressed and uncompressed files.
            using (var linked = new BinCompressedSeriesFile<long, ItemLngDblDbl>(files[0]))
            using (var unlinked = new BinCompressedSeriesFile<long, ItemLngDblDbl>(files[1]))
            using (var raw = new BinSeriesFile<long, ItemLngDblDbl>(files[2]))
            {
                //
                // Configure value storage - the only difference from using BinSeriesFile.
                // A freshly created BinCompressedSeriesFile pre-populates RootField with
                // default configuration objects; double fields need extra setup before
                // the file can be initialized.
                //
                var linkedRoot = (ComplexField)linked.RootField;
                var value1 = (ScaledDeltaFloatField)linkedRoot["Value1"].Field;
                var value2 = (ScaledDeltaFloatField)linkedRoot["Value2"].Field;

                // The doubles carry at most 2 digits after the decimal point:
                // multiplying by 100 before serializing converts them to longs.
                value1.Multiplier = 100;
                value2.Multiplier = 100;

                // ** IMPORTANT: give the second field the same state name as the first,
                // linking their compression states together.
                value2.StateName = value1.StateName;

                // Finish new file initialization and create an empty file.
                linked.InitializeNewFile();

                //
                // Generate items whose value1 and value2 are closely related.
                //
                IEnumerable<ArraySegment<ItemLngDblDbl>> data =
                    Utils.GenerateData(1, 10000, j => new ItemLngDblDbl(j, j * 10, j * 10 + Math.Round(1 / (1.0 + j % 100), 2)));

                // Append data to the state-linked file.
                linked.AppendData(data);

                //
                // Configure the second file identically but without linking the states,
                // then append the same data.
                //
                var unlinkedRoot = (ComplexField)unlinked.RootField;
                ((ScaledDeltaFloatField)unlinkedRoot["Value1"].Field).Multiplier = 100;
                ((ScaledDeltaFloatField)unlinkedRoot["Value2"].Field).Multiplier = 100;
                unlinked.InitializeNewFile();
                unlinked.AppendData(data);

                //
                // The uncompressed file needs no field configuration; append the same data.
                //
                raw.InitializeNewFile();
                raw.AppendData(data);

                //
                // Print file sizes to see if there was any benefit.
                //
                Console.WriteLine("      Shared: {0,10:#,#} bytes", linked.BaseStream.Length);
                Console.WriteLine("   NonShared: {0,10:#,#} bytes", unlinked.BaseStream.Length);
                Console.WriteLine("Uncompressed: {0,10:#,#} bytes", raw.BaseStream.Length);
                Console.WriteLine();

                // All three files must round-trip to identical item sequences.
                if (!linked.Stream().SequenceEqual(unlinked.Stream()))
                {
                    throw new BinaryFileException("File #1 != #2");
                }

                if (!linked.Stream().SequenceEqual(raw.Stream()))
                {
                    throw new BinaryFileException("File #1 != #3");
                }
            }

            //
            // Re-open the files to check that the settings were persisted in the
            // headers and can be re-initialized on open.
            //
            using (var reopened1 = (IWritableFeed<long, ItemLngDblDbl>)BinaryFile.Open(files[0]))
            using (var reopened2 = (IWritableFeed<long, ItemLngDblDbl>)BinaryFile.Open(files[1]))
            {
                if (!reopened1.Stream().SequenceEqual(reopened2.Stream()))
                {
                    throw new BinaryFileException("File #1 != #2");
                }
            }

            // cleanup
            foreach (string fn in files)
            {
                File.Delete(fn);
            }
        }
Exemple #18
0
        /// <summary>
        /// Demonstrates the basic lifecycle of a BinSeriesFile: create and append,
        /// re-open writable and append with truncation, then re-open read-only
        /// and run index-based queries.
        /// </summary>
        public void Run()
        {
            string filename = GetType().Name + ".bts";

            // Remove leftovers from a previous run.
            if (File.Exists(filename))
            {
                File.Delete(filename);
            }

            // Create new BinSeriesFile file that stores a sequence of ItemLngDbl structs.
            // The file is indexed by a long value inside ItemLngDbl marked with the [Index] attribute.
            using (var bf = new BinSeriesFile<long, ItemLngDbl>(filename))
            {
                //
                // Initialize new file parameters and create it
                //
                bf.UniqueIndexes = true;          // enforce index uniqueness
                bf.Tag           = "Sample Data"; // optionally provide a tag to store in the file header
                bf.InitializeNewFile();           // Finish new file initialization and create an empty file

                //
                // Set up data generator to generate 10 items starting with index 3
                //
                IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(3, 10, i => new ItemLngDbl(i, i / 100.0));

                //
                // Append data to the file
                //
                bf.AppendData(data);

                //
                // Read all data and print it using Stream() - one value at a time.
                // This method is slower than StreamSegments(), but easier to use for simple one-value iteration.
                //
                Console.WriteLine(" ** Content of file {0} after the first append", filename);
                Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
                foreach (ItemLngDbl val in bf.Stream())
                {
                    Console.WriteLine(val);
                }
            }

            // Re-open the file, allowing data modifications.
            // IWritableFeed<,> interface is better as it will work with compressed files as well.
            using (var bf = (IWritableFeed<long, ItemLngDbl>)BinaryFile.Open(filename, true))
            {
                // Append a few more items with different ItemLngDbl.Value to tell them apart
                IEnumerable<ArraySegment<ItemLngDbl>> data = Utils.GenerateData(10, 10, i => new ItemLngDbl(i, i / 25.0));

                // New data indexes will overlap with existing, so allow truncating old data
                bf.AppendData(data, true);

                // Print values
                Console.WriteLine("\n ** Content of file {0} after the second append", filename);
                Console.WriteLine("FirstIndex = {0}, LastIndex = {1}", bf.FirstIndex, bf.LastIndex);
                foreach (ItemLngDbl val in bf.Stream())
                {
                    Console.WriteLine(val);
                }
            }

            // Re-open the file for reading only (file can be opened for reading in parallel, but only one write).
            // FIX: open with canWrite=false - the original passed true, contradicting the read-only intent;
            // all operations below are reads, so read-only access is sufficient.
            using (var bf = (IWritableFeed<long, ItemLngDbl>)BinaryFile.Open(filename, false))
            {
                // Show first item with index >= 5
                Console.WriteLine(
                    "\nFirst item on or after index 5 is {0}\n",
                    bf.Stream(5, maxItemCount: 1).First());

                // Show last item with index < 7 (iterate backwards)
                Console.WriteLine(
                    "Last item before index 7 is {0}\n",
                    bf.Stream(7, inReverse: true, maxItemCount: 1).First());

                // Average of values for indexes >= 4 and < 8
                Console.WriteLine(
                    "Average of values for indexes >= 4 and < 8 is {0}\n",
                    bf.Stream(4, 8).Average(i => i.Value));

                // Sum of the first 3 values with index less than 18 and going backwards
                Console.WriteLine(
                    "Sum of the first 3 values with index less than 18 and going backwards is {0}\n",
                    bf.Stream(18, maxItemCount: 3, inReverse: true).Sum(i => i.Value));
            }

            // cleanup
            File.Delete(filename);
        }
        /// <summary>
        /// Benchmarks four storage configurations for the same data set - custom
        /// incremental index + positive-only delta, positive-only delta only,
        /// default compression, and uncompressed - comparing file sizes and
        /// append times, and verifying all four contain identical data.
        /// </summary>
        public void Run()
        {
            // Create filenames, deleting existing files if exist
            string filename1 = CreateFilename(1);
            string filename2 = CreateFilename(2);
            string filename3 = CreateFilename(3);
            string filename4 = CreateFilename(4);

            // One stopwatch per file so the append times can be compared.
            var sw1 = new Stopwatch();
            var sw2 = new Stopwatch();
            var sw3 = new Stopwatch();
            var sw4 = new Stopwatch();

            //
            // Set up sample data so that the delta between index's long is 1
            // and the delta between values is 0.65 => 65 with multiplier, which is bigger than
            // would fit into a 7 bit signed integer, but would fit into 7 bit unsigned one
            //
            const int itemCount = 500000;
            IEnumerable <ArraySegment <ItemLngDbl> > data = Utils.GenerateData(
                0, itemCount, i => new ItemLngDbl(i, Math.Round((i / 100.0) * 65.0, 2)));


            // Create new BinCompressedSeriesFile file that stores a sequence of ItemLngDbl structs
            // The file is indexed by a long value inside ItemLngDbl marked with the [Index] attribute.
            // Here we provide a custom field factory that will analyze each field as it is being created,
            // and may choose to supply a custom field or null to use the default.
            // The name is the automatically generated, starting with the "root" for the TVal with each
            // subfield appended afterwards, separated by a dot.
            // Alternatively, ItemLngDbl.SequenceNum can be marked with [Field(typeof(IncrementalIndex))]
            // For complex types, [Field] attribute can also be set on the type itself.
            using (var bf1 = new BinCompressedSeriesFile <long, ItemLngDbl>(
                       filename1,
                       fieldFactory:
                       (store, type, name) =>
                       type == typeof(long) && name == "root.SequenceNum"
                       // For the long field named "SequenceNum" provide custom IncrementalIndex field serializer
                        ? new IncrementalIndex(store, type, name)
                        : null))
                using (var bf2 = new BinCompressedSeriesFile <long, ItemLngDbl>(filename2))
                    using (var bf3 = new BinCompressedSeriesFile <long, ItemLngDbl>(filename3))
                        using (var bf4 = new BinSeriesFile <long, ItemLngDbl>(filename4))
                        {
                            //
                            // Configure bf1 to be the most compression optimized:
                            //  * use custom incremental field serializer IncrementalIndex
                            //  * use positive-only DeltaType for the value (it always increases in this test)
                            //
                            // When a new instance of BinCompressedSeriesFile is created,
                            // RootField will be pre-populated with default configuration objects.
                            // Some fields, such as doubles, require additional configuration before the file can be initialized.
                            //
                            var root1 = (ComplexField)bf1.RootField;

                            // This double will contain values with no more than 2 digits after the decimal points.
                            // Before serializing, multiply the value by 100 to convert to long.
                            // Next value will always be same or larger than the previous one
                            var val1 = (ScaledDeltaFloatField)root1["Value"].Field;
                            val1.Multiplier   = 100;
                            val1.DeltaType    = DeltaType.Positive;
                            bf1.UniqueIndexes = true; // enforce index uniqueness - each index is +1
                            bf1.InitializeNewFile();  // Finish new file initialization and create an empty file


                            //
                            // Initialize bf2 same as bf1, but without custom serializer
                            //
                            var val2 = (ScaledDeltaFloatField)((ComplexField)bf2.RootField)["Value"].Field;
                            val2.Multiplier   = 100;
                            val2.DeltaType    = DeltaType.Positive;
                            bf2.UniqueIndexes = true;
                            bf2.InitializeNewFile();

                            //
                            // Initialize bf3 in an identical fashion as bf2, but without positive-only delta type.
                            //
                            var val3 = ((ScaledDeltaFloatField)((ComplexField)bf3.RootField)["Value"].Field);
                            val3.Multiplier   = 100;
                            bf3.UniqueIndexes = true;
                            bf3.InitializeNewFile();

                            //
                            // Initialize the fourth (uncompressed) file without any field configuration.
                            //
                            bf4.UniqueIndexes = true;
                            bf4.InitializeNewFile();

                            //
                            // Append the same data to all files, measuring how long it takes
                            // Please note that the timing is not very accurate here, and will give different results depending on the order
                            //
                            sw4.Start();
                            bf4.AppendData(data);
                            sw4.Stop();

                            sw3.Start();
                            bf3.AppendData(data);
                            sw3.Stop();

                            sw2.Start();
                            bf2.AppendData(data);
                            sw2.Stop();

                            sw1.Start();
                            bf1.AppendData(data);
                            sw1.Stop();

                            //
                            // Verify that the created files are identical (use the default bitwise value type Equals)
                            //
                            if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                            {
                                throw new BinaryFileException("File #1 != #2");
                            }
                            if (!bf1.Stream().SequenceEqual(bf3.Stream()))
                            {
                                throw new BinaryFileException("File #1 != #3");
                            }
                            if (!bf1.Stream().SequenceEqual(bf4.Stream()))
                            {
                                throw new BinaryFileException("File #1 != #4");
                            }

                            //
                            // Print file sizes to see if there was any benefit
                            //
                            Console.WriteLine("Finished creating files with {0:#,#} items:\n", itemCount);
                            Console.WriteLine(
                                "{2,40}: {0,10:#,#} bytes in {1}", bf1.BaseStream.Length, sw1.Elapsed,
                                "DeltaType.Positive and Calculated index");
                            Console.WriteLine(
                                "{2,40}: {0,10:#,#} bytes in {1}", bf2.BaseStream.Length, sw2.Elapsed, "DeltaType.Positive");
                            Console.WriteLine(
                                "{2,40}: {0,10:#,#} bytes in {1}", bf3.BaseStream.Length, sw3.Elapsed, "No optimizations");
                            Console.WriteLine("{2,40}: {0,10:#,#} bytes in {1}", bf4.BaseStream.Length, sw4.Elapsed, "Uncompressed");
                            Console.WriteLine();
                        }

            //
            // Check that the settings are stored ok in the file and can be re-initialized on open
            //
            using (var bf1 = (IWritableFeed <long, ItemLngDbl>)BinaryFile.Open(filename1))
                using (var bf2 = (IWritableFeed <long, ItemLngDbl>)BinaryFile.Open(filename2))
                {
                    if (!bf1.Stream().SequenceEqual(bf2.Stream()))
                    {
                        throw new BinaryFileException("File #1 != #2");
                    }
                }

            // cleanup: CreateFilename deletes the file when it already exists (see above),
            // so calling it again removes the benchmark output files.
            CreateFilename(1);
            CreateFilename(2);
            CreateFilename(3);
            CreateFilename(4);
        }
        /// <summary>
        /// Exercises the full lifecycle contract of BinIndexedFile: property access
        /// before initialization throws InvalidOperationException; after
        /// InitializeNewFile the file is open and writable; after dispose every
        /// data-dependent member throws ObjectDisposedException; the file can be
        /// re-opened read/write and read-only; and creating over an existing file
        /// throws IOException.
        /// </summary>
        public void BasicFunctionality()
        {
            string fileName = GetBinFileName();

            if (AllowCreate)
            {
                BinIndexedFile <byte> temp;
                using (var f = new BinIndexedFile <byte>(fileName))
                {
                    // Keep a reference past the using block to test post-dispose behavior below.
                    temp = f;
#pragma warning disable 168
                    // Before InitializeNewFile, all data-dependent properties must throw.
                    Assert.Throws <InvalidOperationException>(() => { var v = f.Count; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.Version; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.HeaderSize; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.IsEmpty; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.ItemSize; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.EnableMemMappedAccessOnRead; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.EnableMemMappedAccessOnWrite; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.Serializer.Version; });
                    Assert.Throws <InvalidOperationException>(() => { var v = f.CanWrite; });
#pragma warning restore 168

                    // Metadata properties are available even before initialization.
                    Assert.IsFalse(f.IsInitialized);
                    Assert.IsFalse(f.IsDisposed);
                    Assert.IsFalse(f.IsOpen);
                    Assert.AreEqual(fileName, f.FileName);
                    Assert.AreEqual("", f.Tag);
                    Assert.AreEqual(typeof(byte), f.ItemType);
                    Assert.IsNotNull(f.Serializer);
                    f.Tag = TagString;
                    Assert.AreEqual(TagString, f.Tag);

                    // BaseVersion can be changed before initialization; null and
                    // unsupported versions are rejected.
                    Version curBaseVer = f.BaseVersion;
                    f.BaseVersion = new Version(1, 0);
                    f.BaseVersion = new Version(1, 1);
                    f.BaseVersion = new Version(1, 2);
                    Assert.Throws <ArgumentNullException>(() => { f.BaseVersion = null; });
                    Assert.Throws <IncompatibleVersionException>(
                        () => { f.BaseVersion = new Version(0, 0); });
                    f.BaseVersion = curBaseVer;


                    f.InitializeNewFile();

                    // After initialization the file is open and writable.
                    Assert.IsTrue(f.CanWrite);

                    Assert.IsNotNull(f.Serializer.Version);
                    Assert.IsTrue(f.IsInitialized);
                    Assert.IsFalse(f.IsDisposed);
                    Assert.IsTrue(f.IsOpen);
                    Assert.AreEqual(fileName, f.FileName);

                    // Memory-mapped access flags default to false and can be toggled.
                    Assert.IsFalse(f.EnableMemMappedAccessOnRead);
                    f.EnableMemMappedAccessOnRead = false;
                    Assert.IsFalse(f.EnableMemMappedAccessOnRead);
                    f.EnableMemMappedAccessOnRead = true;
                    Assert.IsTrue(f.EnableMemMappedAccessOnRead);

                    Assert.IsFalse(f.EnableMemMappedAccessOnWrite);
                    f.EnableMemMappedAccessOnWrite = false;
                    Assert.IsFalse(f.EnableMemMappedAccessOnWrite);
                    f.EnableMemMappedAccessOnWrite = true;
                    Assert.IsTrue(f.EnableMemMappedAccessOnWrite);

                    // Re-initializing or changing BaseVersion after init is forbidden.
                    Assert.Throws <InvalidOperationException>(f.InitializeNewFile);
                    Assert.Throws <InvalidOperationException>(() => { f.BaseVersion = new Version(1, 1); });

                    AfterInitValidation(f, true, fileName);
                }

                temp.Close();                  // allowed after disposing
                ((IDisposable)temp).Dispose(); // disposing multiple times is ok

#pragma warning disable 168
                // After dispose, data-dependent members must throw ObjectDisposedException.
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.Tag; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.EnableMemMappedAccessOnRead; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.EnableMemMappedAccessOnWrite; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.Count; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.IsEmpty; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.ItemSize; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.NonGenericSerializer; });
                Assert.Throws <ObjectDisposedException>(() => { var v = temp.Serializer; });
#pragma warning restore 168

                // Metadata remains readable on a disposed instance.
                Assert.IsTrue(temp.IsInitialized);
                Assert.IsTrue(temp.IsDisposed);
                Assert.IsFalse(temp.IsOpen);
                Assert.AreEqual(fileName, temp.FileName);
                Assert.AreEqual(typeof(byte), temp.ItemType);


                // Re-open the existing file read/write and verify the same state;
                // Close() must behave like Dispose().
                using (
                    var f =
                        (BinIndexedFile <byte>)BinaryFile.Open(fileName, AllowCreate, LegacyResolver))
                {
                    AfterInitValidation(f, true, fileName);
                    f.Close();
#pragma warning disable 168
                    Assert.Throws <ObjectDisposedException>(() => { var v = f.Tag; });
#pragma warning restore 168

                    Assert.IsTrue(f.IsInitialized);
                    Assert.IsTrue(f.IsDisposed);
                    Assert.IsFalse(f.IsOpen);
                    Assert.AreEqual(fileName, f.FileName);
                }
            }

            // Re-open read-only; explicit Dispose inside the using block is harmless.
            using (var f = (BinIndexedFile <byte>)BinaryFile.Open(fileName, false, LegacyResolver))
            {
                AfterInitValidation(f, false, fileName);
                ((IDisposable)f).Dispose();
#pragma warning disable 168
                Assert.Throws <ObjectDisposedException>(() => { var v = f.Tag; });
#pragma warning restore 168
            }

            // Creating a new file over an existing one must fail with IOException
            // until the old file is deleted.
            using (var f = new BinIndexedFile <byte>(fileName))
            {
                Assert.Throws <IOException>(f.InitializeNewFile);

                if (RunMode == Mode.OneTime)
                {
                    File.Delete(fileName);
                    f.InitializeNewFile();
                }
            }
        }