Example #1
        public void CreateDataIndexGetBytesLengthOf32()
        {
            var dataIndex = new DataIndex(1, 1, 1, 50);
            var bytes = dataIndex.GetBytes();

            int count = bytes.Length;
            Assert.AreEqual(32, count);
        }
Example #2
        public void UpdateRecordLengthCheckDoesNotRequireRelocation()
        {
            // create the initial index with a 100% padding factor (100 bytes of padding).
            var dataIndex = new DataIndex(1, 1, 100, 100);
            Assert.IsFalse(dataIndex.RequiresRelocation);

            dataIndex.UpdateRecordLength(200);
            Assert.IsFalse(dataIndex.RequiresRelocation);
        }
Example #3
        public void CreateDataIndexGetBytesAndConvertBackToDataIndex()
        {
            // create the initial index and convert to bytes.
            var dataIndex = new DataIndex(1, 1, 1, 50);
            var bytes = dataIndex.GetBytes();

            // convert the bytes back into a data index structure
            var newDataIndex = DataIndex.Parse(bytes);

            Assert.AreEqual(dataIndex.DocumentKey, newDataIndex.DocumentKey);
            Assert.AreEqual(dataIndex.Pointer, newDataIndex.Pointer);
            Assert.AreEqual(dataIndex.RecordLength, newDataIndex.RecordLength);
        }
Example #4
 public void Insert1000DataIndexEntriesWithDuplicateCheckingDisabled()
 {
     Delete ();
     using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath)) {
         for (int i = 1; i < 1001; i++) {
             var dataIndex = new DataIndex (i, i, i, 50);
             dataIndexProcessor.AddIndex (dataIndex);
             #if DEBUG
             Console.WriteLine("INSERT: {0}", i);
             #endif
         }
     }
     Assert.IsTrue (true);
 }
Example #5
 public void AddDataIndexFailsDuplicate()
 {
     Delete();
     //var deletedDataIndexProcessor = new DeletedDataIndexProcessor(_deletedDataIndexPath);
     var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath);
     bool duplicateFound = false;
     try {
         var dataIndex = new DataIndex(1, 1, 100, 50);
         dataIndexProcessor.AddIndexCheckForDuplicate(dataIndex);
         dataIndexProcessor.AddIndexCheckForDuplicate(dataIndex);
     } catch (ConcurrencyException ex) {
         if (!String.IsNullOrEmpty (ex.Message))
             duplicateFound = true;
         #if DEBUG
         Console.WriteLine(ex.Message);
         #endif
     }
     Assert.True(duplicateFound);
 }
Example #6
        public void DeleteDataIndexSuccess()
        {
            Delete();
            //var deletedDataIndexProcessor = new DeletedDataIndexProcessor(_deletedDataIndexPath);
            var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath);
            var dataIndex = new DataIndex(1, 1, 100, 50);
            dataIndexProcessor.AddIndex(dataIndex);
            dataIndexProcessor.RemoveIndex(1);

            bool doesExist = dataIndexProcessor.DoesIndexExist(1);
            Assert.False (doesExist);
        }
Example #7
        public void Insert1000DataIndexEntriesWithDuplicateCheckingEnabledWith8KBReadCache()
        {
            Delete ();
            using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath, DiskBufferSize.Small, DiskBufferSize.Small)) {

                for (int i = 1; i < 1001; i++) {
                    var dataIndex = new DataIndex (i, i, i, 50);
                    dataIndexProcessor.AddIndexCheckForDuplicate (dataIndex);
                    #if DEBUG
                    Console.WriteLine("INSERT: {0}", i);
                    #endif
                }
            }
            Assert.IsTrue (true);
        }
Example #8
        public void Insert1000DataIndexEntriesWithDuplicateCheckingEnabledRandomRead()
        {
            Delete ();
            using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath, DiskBufferSize.Default, DiskBufferSize.Default)) {

                // adding an out-of-sequence index with a very large key forces the check for duplicates to search the full index.
                var dataIndex2 = new DataIndex (100000000, 100, 100, 50);
                dataIndexProcessor.AddIndexCheckForDuplicate (dataIndex2);

                for (int i = 1; i < 1001; i++) {
                    var dataIndex = new DataIndex (i, i, i, 50);
                    dataIndexProcessor.AddIndexCheckForDuplicate (dataIndex);
                    #if DEBUG
                    Console.WriteLine("INSERT: {0}", i);
                    #endif
                }
            }
            Assert.IsTrue (true);
        }
Example #9
        /// <summary>
        /// Parses a single chunk of bytes into a single data index.
        /// </summary>
        /// <returns>DataIndex object containing record info.</returns>
        /// <param name="bytes">The array of Bytes to parse (Must be 32 Bytes).</param>
        public static DataIndex Parse(byte[] bytes)
        {
            if (bytes == null)
                throw new ArgumentNullException("bytes", "A byte array is required.");
            if (bytes.Length != 32)
                throw new ArgumentException("byte array of length 32 is required.");

            // If the system architecture is little-endian (that is, little end first),
            // reverse the byte array.
            if (BitConverter.IsLittleEndian)
                Array.Reverse(bytes);

            // create an empty index and then set properties from loaded bytes.
            var dataIndex = new DataIndex(0, 0, 0, 0);
            dataIndex.DocumentKey = BitConverter.ToInt64(bytes, 0);
            dataIndex.Pointer = BitConverter.ToInt64(bytes, 8);
            dataIndex.RecordLength = BitConverter.ToInt32(bytes, 16);
            dataIndex.PaddingLength = BitConverter.ToInt32(bytes, 20);

            // return the data index.
            return dataIndex;
        }
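
The GetBytes serializer that this Parse method mirrors does not appear on this page. A minimal sketch, assuming the 32-byte layout Parse reads above (DocumentKey at offset 0, Pointer at 8, RecordLength at 16, PaddingLength at 20, the last 8 bytes left as zero) and the same whole-array reversal on little-endian machines, could look like this; the library's actual implementation may differ:

        // Hypothetical sketch only; not the library's actual GetBytes.
        public byte[] GetBytes()
        {
            var bytes = new byte[32];

            // write each field at the offset Parse reads it from; bytes 24-31 stay zero.
            Buffer.BlockCopy(BitConverter.GetBytes(this.DocumentKey), 0, bytes, 0, 8);
            Buffer.BlockCopy(BitConverter.GetBytes(this.Pointer), 0, bytes, 8, 8);
            Buffer.BlockCopy(BitConverter.GetBytes(this.RecordLength), 0, bytes, 16, 4);
            Buffer.BlockCopy(BitConverter.GetBytes(this.PaddingLength), 0, bytes, 20, 4);

            // Parse reverses the whole array on little-endian machines, so reverse here
            // as well to keep the round trip shown in Example #3 symmetric.
            if (BitConverter.IsLittleEndian)
                Array.Reverse(bytes);

            return bytes;
        }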
Example #10
 /// <summary>
 /// Updates the data index located in the data index file.
 /// </summary>
 /// <param name="dataIndex">The changed data index to update.</param>
 public void UpdateIndex(DataIndex dataIndex)
 {
     // overwrite the existing entry at its stored position in the data index file.
     _binaryWriter.BaseStream.Position = dataIndex.Position;
     _binaryWriter.Write(dataIndex.GetBytes());
     _binaryWriter.Flush();
     _cache.Clear();
 }
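
None of the tests on this page call UpdateIndex directly. A hedged usage sketch, assuming Position is populated once the entry has been written and reusing the UpdateRecordLength / RequiresRelocation members shown in Examples #2 and #18:

     // Hedged usage sketch; assumes dataIndex.Position is set once the entry is written.
     var processor = new DataIndexProcessor(_dataIndexPath);
     var dataIndex = new DataIndex(1, 1, 100, 50);   // 50% padding factor -> a 150-byte slot (see Example #14)
     processor.AddIndex(dataIndex);

     dataIndex.UpdateRecordLength(120);              // 120 bytes still fits inside the 150-byte slot
     if (!dataIndex.RequiresRelocation)
         processor.UpdateIndex(dataIndex);           // rewrite the 32-byte entry at dataIndex.Position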
Example #11
        /// <summary>
        /// Adds an index to the Data Index file.
        /// Overwrites the first data index entry found whose document key is 0 (i.e. a deleted entry).
        /// </summary>
        public void AddIndexOverwriteDeleted(DataIndex dataIndex)
        {
            // scan the data index file for a deleted (zeroed) entry to reuse.
            _binaryReader.BaseStream.Position = 0;
            while(_binaryReader.BaseStream.Position < this.FileSize){

                // remember where this entry starts, then load its 32 bytes and parse them.
                long entryPosition = _binaryReader.BaseStream.Position;
                byte[] dataIndexBytes = _binaryReader.ReadBytes(32);
                var existingDataIndex = DataIndex.Parse(dataIndexBytes);

                // a document key of 0 marks a deleted entry; overwrite it in place.
                if (existingDataIndex.DocumentKey == 0){
                    _binaryWriter.BaseStream.Position = entryPosition;
                    _binaryWriter.Write(dataIndex.GetBytes());
                    _binaryWriter.Flush();
                    return;
                }
            }

            // not found so add to end.
            AddIndex(dataIndex);
        }
Example #12
        /// <summary>
        /// Adds the Data Index to the Data Index file and checks for duplicates.
        /// Note: The Data Index is the pointer to the record in the entity's data file.
        /// </summary>
        /// <param name="dataIndex">The Data Index to add; its Document Key (Search Key), Pointer, and Record Length describe where and how the entity's record is stored in the data file.</param>
        /// <exception cref="ConcurrencyException">When a data index record is found having the same Search Key</exception>
        public void AddIndexCheckForDuplicate(DataIndex dataIndex)
        {
            // make sure an index with this document key does not already exist.
            if (DoesIndexExist (dataIndex.DocumentKey))
                throw new ConcurrencyException ("A Data Index record with this Search Key already exists.");

            AddIndex(dataIndex);
        }
Example #13
        /// <summary>
        /// Adds the Data Index to the Data Index file.
        /// WARNING: This function does not check for duplicates and can cause problems with duplicate Data Indexes.
        /// Only use AddIndex if you have some other way of making sure the Data Indexes being stored are unique,
        /// e.g. an auto-incrementing Search Key (Primary Key). AddIndexCheckForDuplicate is slower but makes sure there are no duplicates.
        /// Note: The Data Index is the pointer to the record in the entity's data file.
        /// </summary>
        public void AddIndex(DataIndex dataIndex)
        {
            // add the index to the dataindex file.
            _binaryWriter.BaseStream.Position = this.FileSize;
            _binaryWriter.Write(dataIndex.GetBytes());
            _binaryWriter.Flush();

            // advance the tracked file size; maintaining it here is quicker than querying the stream length every time.
            this.FileSize += 32;

            //_cache.Clear();
        }
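
A short illustrative sketch of the trade-off described above (the keys and sizes are made up, and Examples #4 and #5 show the same calls in test form): an auto-incrementing key can safely take the unchecked AddIndex path, while externally supplied keys should go through AddIndexCheckForDuplicate and handle ConcurrencyException:

            // Illustrative sketch only; keys and sizes are made up.
            using (var processor = new DataIndexProcessor(_dataIndexPath)) {
                // auto-incrementing keys: every key is new by construction,
                // so the unchecked (faster) AddIndex is safe.
                for (int i = 1; i <= 100; i++)
                    processor.AddIndex(new DataIndex(i, i, i, 50));

                // externally supplied keys: pay for the duplicate scan and let
                // ConcurrencyException signal a clash (key 42 already exists here).
                try {
                    processor.AddIndexCheckForDuplicate(new DataIndex(42, 0, 100, 50));
                } catch (ConcurrencyException) {
                    // handle the duplicate Search Key
                }
            }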
Example #14
 public void CreateDataIndexWith50PercentPaddingFactorResultIn50BytesPaddingLength()
 {
     var dataIndex = new DataIndex(1, 1, 100, 50);
     Assert.AreEqual(50, dataIndex.PaddingLength);
 }
Example #15
        public void AddDataIndexSuccess()
        {
            Delete();
            //var deletedDataIndexProcessor = new DeletedDataIndexProcessor(_deletedDataIndexPath);
            var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath);
            var dataIndex = new DataIndex(1, 1, 100, 50);
            dataIndexProcessor.AddIndex(dataIndex);

            Assert.True (true);
        }
Example #16
        public void GetDataIndexWithEnoughSpaceReturnsNull()
        {
            Delete();
            //var deletedDataIndexProcessor = new DeletedDataIndexProcessor(_deletedDataIndexPath);
            var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath);

            // insert an index with a total space of 150 bytes (100-byte record + 50 bytes of padding).
            var dataIndex = new DataIndex(1, 1, 100, 50);
            dataIndexProcessor.AddIndexCheckForDuplicate(dataIndex);

            var loaded = dataIndexProcessor.GetDataIndexWithEnoughSpace(500);
            Assert.IsNull(loaded);
        }
Example #17
        public void DeleteIndexAddIndexCheckOverwrite()
        {
            Delete();
            var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath);

            // insert indexes with no padding (100 bytes of total space each).
            var dataIndex = new DataIndex(1, 1, 100, 0);
            var dataIndex2 = new DataIndex(2, 1, 100, 0);
            dataIndexProcessor.AddIndexOverwriteDeleted(dataIndex);
            dataIndexProcessor.RemoveIndex(1);

            var fileSizeBeforeAdd = dataIndexProcessor.FileSize;
            dataIndexProcessor.AddIndexOverwriteDeleted(dataIndex2);

            Assert.AreEqual(fileSizeBeforeAdd, dataIndexProcessor.FileSize);
        }
Example #18
        public void UpdateRecordLengthNoPaddingFactorCheckDoesRequireRelocation()
        {
            // create the initial index with no padding factor.
            var dataIndex = new DataIndex(1, 1, 100, 0);
            Assert.IsFalse(dataIndex.RequiresRelocation);

            dataIndex.UpdateRecordLength(101);
            Assert.IsTrue(dataIndex.RequiresRelocation);
        }