/// <summary>
/// Parses a single 32-byte chunk into a single data index.
/// </summary>
/// <returns>A BlobIndex object containing the record info.</returns>
/// <param name="bytes">The array of bytes to parse (must be exactly 32 bytes).</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="bytes"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when <paramref name="bytes"/> is not 32 bytes long.</exception>
public static BlobIndex Parse(byte[] bytes)
{
    if (bytes == null)
    {
        // BUGFIX: the original passed the message as the paramName argument.
        throw new ArgumentNullException(nameof(bytes), "byte array is required.");
    }

    if (bytes.Length != 32)
    {
        throw new ArgumentException("byte array of length 32 is required.", nameof(bytes));
    }

    // BUGFIX: work on a local copy so the caller's buffer is not mutated
    // by the endianness reversal below.
    var buffer = new byte[32];
    Array.Copy(bytes, buffer, 32);

    // If the system architecture is little-endian (that is, little end first),
    // reverse the byte array.
    // NOTE(review): this reverses the whole 32-byte record, which also reverses
    // the field order; it only round-trips if BlobIndex.GetBytes() applies the
    // same whole-array reversal — confirm before changing the on-disk format.
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(buffer);
    }

    // create an empty index and then set properties from the loaded bytes.
    var dataIndex = new BlobIndex(0, 0, 0, 0);
    dataIndex.DocumentKey = BitConverter.ToInt64(buffer, 0);
    dataIndex.Pointer = BitConverter.ToInt64(buffer, 8);
    dataIndex.RecordLength = BitConverter.ToInt32(buffer, 16);
    dataIndex.PaddingLength = BitConverter.ToInt32(buffer, 20);

    return dataIndex;
}
/// <summary>
/// Writes the supplied data index back to its original location in the
/// data index file and invalidates the read cache.
/// </summary>
/// <param name="dataIndex">The changed data index to persist.</param>
public void UpdateIndex(BlobIndex dataIndex)
{
    // Seek to where this index lives and overwrite it in place.
    var stream = _binaryWriter.BaseStream;
    stream.Position = dataIndex.Position;
    _binaryWriter.Write(dataIndex.GetBytes());
    _binaryWriter.Flush();

    // Cached copies may now be stale, so drop the whole cache.
    _cache.Clear();
}
/// <summary>
/// Adds the Data Index to the Data Index file after checking for a duplicate.
/// Note: The Data Index is the pointer to the record in the entity's data file.
/// </summary>
/// <param name="dataIndex">The data index to add; its DocumentKey (Primary Key) is checked for uniqueness.</param>
/// <exception cref="Exception">Thrown when a data index record with the same Document Key already exists.</exception>
public void AddIndexCheckForDuplicate(BlobIndex dataIndex)
{
    // make sure an index with this document key does not already exist.
    // NOTE(review): the original XML docs claimed a ConcurrencyException is
    // thrown here, but the code throws a bare Exception — docs fixed to match
    // the code; consider throwing a more specific exception type.
    if (DoesIndexExist(dataIndex.DocumentKey))
    {
        throw new Exception("A Data Index record with this Search Key already exists.");
    }

    AddIndex(dataIndex);
}
/// <summary>
/// Appends the Data Index to the end of the Data Index file.
/// WARNING: This function does not check for duplicates and can cause problems with duplicate Data Index's.
/// Only use AddIndex if you have some other method of making sure the Data Index's being stored are unique,
/// e.g. an auto-incrementing Search Key (Primary Key). AddIndexCheckForDuplicate is slower but guarantees uniqueness.
/// Note: The Data Index is the pointer to the record in the entites data file.
/// </summary>
/// <param name="dataIndex">The data index to append.</param>
public void AddIndex(BlobIndex dataIndex)
{
    // Seek to the current logical end of the file and append the 32-byte record.
    _binaryWriter.BaseStream.Position = this.FileSize;
    _binaryWriter.Write(dataIndex.GetBytes());
    _binaryWriter.Flush();

    // Track the new size locally; cheaper than querying the stream length every time.
    this.FileSize += 32;
}
/// <summary>
/// Gets the data index for a record by its 1-based record number.
/// Returns null when past the end of the data index file or when the slot is deleted.
/// </summary>
/// <returns>The data index for the record, or null.</returns>
/// <param name="dataIndexNumber">The 1-based record number of the data index.</param>
public BlobIndex GetBlobIndex(long dataIndexNumber)
{
    // BUGFIX: guard against non-positive record numbers, which would
    // otherwise produce a negative stream seek and throw.
    if (dataIndexNumber < 1)
    {
        return null;
    }

    long positionOfIndex = (dataIndexNumber - 1) * 32;

    // BUGFIX: require a full 32-byte record at that position; the original
    // `FileSize > positionOfIndex` check allowed a short read, which makes
    // BlobIndex.Parse throw on the truncated array.
    if (this.FileSize >= positionOfIndex + 32)
    {
        _binaryReader.BaseStream.Position = positionOfIndex;
        byte[] dataIndexBytes = _binaryReader.ReadBytes(32);
        var dataIndex = BlobIndex.Parse(dataIndexBytes);

        // a zero document key marks a deleted slot.
        if (dataIndex.DocumentKey > 0)
        {
            dataIndex.Position = positionOfIndex;
            return dataIndex;
        }
    }

    return null;
}
/// <summary>
/// Gets the first available data index whose total space (record length plus
/// padding) is at least the size specified in bytes.
/// </summary>
/// <returns>The first data index large enough, or null if none exists.</returns>
/// <param name="dataLength">Required data length in bytes.</param>
public BlobIndex GetBlobIndexWithEnoughSpace(long dataLength)
{
    // search the read cache before going to disk.
    foreach (var cached in _cache)
    {
        // BUGFIX: the cache test used `>` while the disk scan used `>=`;
        // unified on `>=` — a slot fits when its total size covers the request.
        if (cached.DocumentKey != 0 && cached.RecordLength + cached.PaddingLength >= dataLength)
        {
            return cached;
        }
    }

    // The cache holds the first N records read from disk, so resume the scan
    // at record N. BUGFIX: the original resumed at (N-1)*32, re-reading and
    // re-caching the last cached record on every call.
    long positionOfIndex = _cache.Count() * 32;

    // loop through the remaining on-disk records.
    _binaryReader.BaseStream.Position = positionOfIndex;
    while (_binaryReader.BaseStream.Position < this.FileSize)
    {
        // remember where this record starts before the read advances the stream.
        long recordPosition = _binaryReader.BaseStream.Position;

        byte[] dataIndexBytes = _binaryReader.ReadBytes(32);
        var dataIndex = BlobIndex.Parse(dataIndexBytes);

        // BUGFIX: record the on-disk position so a later UpdateIndex writes
        // back to the right place (the original left Position unset here).
        dataIndex.Position = recordPosition;

        // add the index to the read cache so the next search is quicker.
        _cache.Add(dataIndex);

        // return the index if it has at least the space specified.
        if (dataIndex.DocumentKey != 0 && dataIndex.RecordLength + dataIndex.PaddingLength >= dataLength)
        {
            return dataIndex;
        }
    }

    return null;
}
/// <summary>
/// Adds an index to the Data Index file, overwriting the first data index
/// found with a 0 document key (a deleted slot). Appends if none is found.
/// </summary>
/// <param name="dataIndex">The data index to store.</param>
public void AddIndexOverwriteDeleted(BlobIndex dataIndex)
{
    // scan the file from the start looking for a deleted slot.
    _binaryReader.BaseStream.Position = 0;
    while (_binaryReader.BaseStream.Position < this.FileSize)
    {
        // remember where this record starts; after ReadBytes the reader's
        // position points at the NEXT record.
        long recordPosition = _binaryReader.BaseStream.Position;

        byte[] dataIndexBytes = _binaryReader.ReadBytes(32);
        var existingBlobIndex = BlobIndex.Parse(dataIndexBytes);

        // a zero document key marks a deleted slot that can be reused.
        if (existingBlobIndex.DocumentKey == 0)
        {
            // BUGFIX: write at the start of the deleted record; the original
            // used the reader's post-read position and so overwrote the record
            // AFTER the deleted slot.
            _binaryWriter.BaseStream.Position = recordPosition;
            _binaryWriter.Write(dataIndex.GetBytes());
            _binaryWriter.Flush();
            return;
        }
    }

    // no deleted slot found so append to the end.
    AddIndex(dataIndex);
}
/// <summary>
/// Finds the Data Index in the file with the given Document Key (Primary Key).
/// </summary>
/// <returns>The Data Index containing the data record info, or null if not found.</returns>
/// <param name="documentKey">The Document Key (Primary Key) of the entity to find.</param>
public BlobIndex FindIndex(long documentKey)
{
    // BUGFIX: also reject negative keys; the original only checked for 0 and
    // a negative key produced a negative stream seek, which throws.
    if (documentKey <= 0)
    {
        return null;
    }

    // attempt to find the key by direct calculation first (only valid for
    // auto-incremented document collections, where record K lives at (K-1)*32).
    long positionOfIndex = (documentKey - 1) * 32;

    // don't attempt to load bytes when the computed position is past the end of the file.
    if (this.FileSize > positionOfIndex)
    {
        _binaryReader.BaseStream.Position = positionOfIndex;
        byte[] dataIndexBytes = _binaryReader.ReadBytes(32);
        var dataIndexInitial = BlobIndex.Parse(dataIndexBytes);
        if (dataIndexInitial.DocumentKey == documentKey)
        {
            dataIndexInitial.Position = positionOfIndex;
            return dataIndexInitial;
        }
    }

    // search the read cache before scanning the disk.
    var fromCache = _cache.SingleOrDefault(key => key.DocumentKey == documentKey && key.DocumentKey > 0);
    if (fromCache != null)
    {
        return fromCache;
    }

    // The cache holds the first N records read from disk, so resume the scan
    // at record N. BUGFIX: the original resumed at (N-1)*32, re-reading and
    // re-caching the last cached record on every miss.
    positionOfIndex = _cache.Count() * 32;

    // loop through the remaining on-disk records until the key is found.
    _binaryReader.BaseStream.Position = positionOfIndex;
    while (_binaryReader.BaseStream.Position < this.FileSize)
    {
        // BUGFIX: capture where this record actually starts; the original
        // assigned the loop's START offset to any match found later in the
        // scan, so Position was wrong for every record after the first.
        long recordPosition = _binaryReader.BaseStream.Position;

        byte[] dataIndexBytes = _binaryReader.ReadBytes(32);
        var dataIndex = BlobIndex.Parse(dataIndexBytes);

        // add the index to the read cache so the next search is quicker.
        dataIndex.Position = recordPosition;
        _cache.Add(dataIndex);

        // if the document key matches return it.
        if (dataIndex.DocumentKey == documentKey)
        {
            return dataIndex;
        }
    }

    // the data index was not found in the data index file.
    return null;
}
/// <summary>
/// Creates (inserts) a new entity record: validates the entity, generates or
/// accepts its document key, serializes it, allocates a data index (reusing a
/// deleted slot when one is large enough), writes the bytes plus padding to
/// the data file and updates the header record.
/// </summary>
/// <param name="entity">The entity to store; its type name must match this store and it must have an Id property.</param>
/// <returns>The same entity instance, with its Id property set to the saved document key.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="entity"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the entity type does not match this data store.</exception>
/// <exception cref="Exception">Thrown when the entity has no Id property.</exception>
public T Create(T entity)
{
    if (entity == null)
    {
        // NOTE(review): this message is passed as the paramName argument of
        // ArgumentNullException — it will be reported as the parameter name.
        throw new ArgumentNullException("Entity argument can't be null");
    }

    // make sure the entity name matches the document store type.
    string requiredEntityName = entity.GetType().Name;
    if (_entityName != requiredEntityName)
    {
        throw new ArgumentException("Entity type is not valid for this data store.");
    }

    // make sure entity has key field
    if (!InternalReflectionHelper.PropertyExists(entity, "Id"))
    {
        throw new Exception("Entity must have an Id property and be of type short, integer or long." +
            "This is used as the primary key for the entity being stored.");
    }

    // load the document key from the entity as its needed for adding to the index.
    var documentKey = InternalReflectionHelper.GetPropertyValueInt64(entity, "Id");

    // get the data store header so we can generate keys and store record counts.
    var header = GetDataHeader();

    // boolean so we know to check for duplicate or not on insert.
    // duplicates only need checked when the user has specified the document key
    // themselves; a key of 0 means "auto-generate".
    bool checkForDuplicate = true;
    if (documentKey == 0)
    {
        checkForDuplicate = false;
    }

    // get the next document key from the data file header record.
    documentKey = header.GenerateNextRecord(documentKey);

    // update the entity value so that the callers entity gets the saved document key.
    InternalReflectionHelper.SetPropertyValue(entity, "Id", documentKey);

    // parse the document into a binary json document for storing in the data file.
    byte[] binaryJson = _serializer.Serialize <T>(entity);

    using (var _deletedDataIndexFile = new BlobIndexFile(_deletedDataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
    {
        // create the data index with the data pointer at the end of the document.
        // check to see if there is a deleted slot that can be used to store the data.
        var dataIndex = _deletedDataIndexFile.GetBlobIndexWithEnoughSpace(binaryJson.Length);
        if (dataIndex != null)
        {
            // assign this document key to the deleted index.
            // NOTE(review): UpdateRecordLength presumably rebalances padding so
            // the slot's total size stays the same — confirm, otherwise the
            // following record could be overwritten.
            dataIndex.ChangeDocumentKey(documentKey);
            dataIndex.UpdateRecordLength(binaryJson.Length);
        }
        else
        {
            // create a new data index at the end of the data file.
            dataIndex = new BlobIndex(documentKey, this.FileSize, binaryJson.Length, this.PaddingFactor);

            // update the size of the datafile
            this.FileSize = this.FileSize + dataIndex.RecordLength + dataIndex.PaddingLength;
        }

        using (var _dataIndexFile = new BlobIndexFile(_dataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
        {
            // create the data index (AddIndex throws ConcurrencyException so no data will save)
            if (checkForDuplicate)
            {
                _dataIndexFile.AddIndexCheckForDuplicate(dataIndex);
            }
            else
            {
                _dataIndexFile.AddIndex(dataIndex);
            }
        }

        // remove the index from the deleted index file if it exists.
        // NOTE(review): ChangeDocumentKey above mutated this index to the NEW
        // key, so this passes the new key to RemoveIndex while the on-disk
        // deleted entry still holds the old key — confirm RemoveIndex locates
        // the entry by position rather than by key.
        _deletedDataIndexFile.RemoveIndex(dataIndex.DocumentKey);

        // NOTE(review): FileOptions.SequentialScan is a read-access hint but
        // this stream is opened for write with a random seek — verify intent.
        using (var _fileStreamWriter = new FileStream(_dataFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read, (int)DiskBufferSizes.Default, FileOptions.SequentialScan))
        {
            using (var _binaryWriter = new BinaryWriter(_fileStreamWriter))
            {
                // add the data record to the data file at the index's pointer.
                _binaryWriter.BaseStream.Position = dataIndex.Pointer;

                // write the record
                _binaryWriter.Write(binaryJson);

                // write the padding (zero bytes) after the record.
                if (dataIndex.PaddingLength > 0)
                {
                    _binaryWriter.Write(new Byte[dataIndex.PaddingLength]);
                }

                // save the data
                _binaryWriter.Flush();
            }
        }

        // update the header record
        UpdateDataHeader(header);
    }

    return entity;
}