/// <summary>
/// Deletes an entity from the data store. The data bytes themselves are not
/// erased; the live index entry is removed and the orphaned slot is recorded
/// in the deleted-data index file so its space can be reused later.
/// </summary>
/// <param name="t">The entity to delete. Must expose an "Id" property.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="t"/> is null.</exception>
public void Delete(T t)
{
    if (t == null)
    {
        // Fail fast with a clear exception instead of an opaque reflection error.
        throw new ArgumentNullException(nameof(t));
    }

    using (var _dataIndexFile = new BlobIndexFile(_dataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
    {
        // Read the key via the Int64 accessor for consistency with Update();
        // Id may be short, int or long, and the Int32 accessor would truncate
        // or reject larger key values.
        var searchKey = InternalReflectionHelper.GetPropertyValueInt64(t, "Id");

        var dataIndex = _dataIndexFile.FindIndex(searchKey);
        if (dataIndex == null)
        {
            // Nothing to delete - the entity is not in the store.
            return;
        }

        using (var _deletedDataIndexFile = new BlobIndexFile(_deletedDataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
        {
            // Create a new deleted-data index entry pointing at the deleted data.
            _deletedDataIndexFile.AddIndexOverwriteDeleted(dataIndex);
        }

        // No need to delete the data - just remove the live index entry.
        _dataIndexFile.RemoveIndex(searchKey);

        // Keep the header's record count in sync.
        var header = GetDataHeader();
        header.RemoveRecord();
        UpdateDataHeader(header);
    }
}
/// <summary>
/// Updates an existing entity in the data store. The serialized document is
/// written in place when it still fits its current slot; otherwise the old
/// slot is recorded in the deleted-data index and the document is relocated
/// to the end of the data file.
/// </summary>
/// <param name="entity">The entity to update. Must expose an "Id" property of type short, int or long.</param>
/// <returns>The entity that was passed in.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="entity"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the entity type does not match this store, or when the entity has no Id property.</exception>
/// <exception cref="InvalidOperationException">Thrown when no existing record with the entity's Id can be found.</exception>
public T Update(T entity)
{
    if (entity == null)
    {
        // The single-string ArgumentNullException ctor treats its argument as
        // the *parameter name*; use the (paramName, message) overload so the
        // exception text reads correctly.
        throw new ArgumentNullException(nameof(entity), "Entity argument can't be null");
    }

    // Make sure the entity name matches the document store type.
    string requiredEntityName = entity.GetType().Name;
    if (_entityName != requiredEntityName)
    {
        throw new ArgumentException("Entity type is not valid for this data store.");
    }

    // Make sure the entity has a key field. ArgumentException (a subclass of
    // Exception) keeps existing catch(Exception) callers working.
    if (!InternalReflectionHelper.PropertyExists(entity, "Id"))
    {
        throw new ArgumentException(
            "Entity must have an Id property and be of type short, integer or long." +
            "This is used as the primary key for the entity being stored.");
    }

    // Load the document key from the entity as it's needed for the index lookup.
    var documentKey = InternalReflectionHelper.GetPropertyValueInt64(entity, "Id");

    using (var _dataIndexFile = new BlobIndexFile(_dataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
    {
        // Search the data store for the document.
        var dataIndex = _dataIndexFile.FindIndex(documentKey);
        if (dataIndex == null)
        {
            throw new InvalidOperationException("Could not find an existing entity to update.");
        }

        // Parse the document into a binary JSON representation.
        byte[] binaryJson = _serializer.Serialize<T>(entity);

        // Record the document's new size; this also determines whether the
        // record still fits its existing slot (RequiresRelocation).
        dataIndex.UpdateRecordLength(binaryJson.Length);

        using (var _fileStreamWriter = new FileStream(_dataFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read, (int)DiskBufferSizes.Default, FileOptions.SequentialScan))
        using (var _binaryWriter = new BinaryWriter(_fileStreamWriter))
        {
            if (dataIndex.RequiresRelocation)
            {
                // The record no longer fits its old slot: record the old slot
                // as deleted so its space can be reclaimed later...
                using (var _deletedDataIndexFile = new BlobIndexFile(_deletedDataIndexFilePath, DiskBufferSizes.Larger, DiskBufferSizes.Default))
                {
                    _deletedDataIndexFile.AddIndexOverwriteDeleted(dataIndex);
                }

                // ...then append the document at the end of the data file and
                // repoint the index entry at the new location.
                _binaryWriter.BaseStream.Position = this.FileSize;
                dataIndex.UpdateRecordPointer(_binaryWriter.BaseStream.Position, this.PaddingFactor);

                // Grow the tracked file size by the record plus its padding.
                this.FileSize = this.FileSize + dataIndex.RecordLength + dataIndex.PaddingLength;
            }
            else
            {
                // In-place update: seek to the document's existing slot.
                _binaryWriter.BaseStream.Position = dataIndex.Pointer;
            }

            // Write the document, followed by zeroed padding bytes if any.
            _binaryWriter.Write(binaryJson);
            if (dataIndex.PaddingLength > 0)
            {
                _binaryWriter.Write(new byte[dataIndex.PaddingLength]);
            }

            // Persist the (possibly relocated) index entry and flush the data.
            _dataIndexFile.UpdateIndex(dataIndex);
            _binaryWriter.Flush();
        }
    }

    return entity;
}