/// <summary>
/// Registers <paramref name="key"/> in the storage index and starts an asynchronous,
/// fire-and-forget write of the associated data via <see cref="SetupWrite"/>.
/// </summary>
/// <param name="key">Unique key for the entry. NOTE(review): SetupWrite re-opens a FileStream on this key, so it appears to double as a source file path — confirm.</param>
/// <param name="data">Source stream; only its Length is read here for validation. This method disposes it.</param>
/// <param name="parameters">Expected length/hash/compression settings for the data.</param>
/// <exception cref="ArgumentNullException">key, data or parameters is null.</exception>
/// <exception cref="ArgumentException">An element with the same key already exists, or the declared length does not match the stream.</exception>
public void Add(string key, Stream data, StreamInfo parameters)
{
    try
    {
        int fileNo = ++_fileCounter;
        Utility.ConsoleWriteLine($"Start adding file no: {fileNo}");

        // Validate BEFORE touching the stream. The original disposed `data` up front and
        // then read data.Length below, which throws ObjectDisposedException on most streams.
        if (key is null) { throw new CustomException(ExceptionKey.NullKey); }
        if (data is null) { throw new CustomException(ExceptionKey.NullData); }
        if (parameters is null) { throw new CustomException(ExceptionKey.NullParameters); }
        if (StorageIndex.Contains(key)) { throw new CustomException(ExceptionKey.DuplicateKey); }
        if (parameters.Length.HasValue && parameters.Length != data.Length)
        {
            throw new CustomException(ExceptionKey.StreamLengthNotMatched);
        }

        // The stream content is never read here (SetupWrite re-opens the file by key),
        // so the handle can be released as soon as validation is done.
        data.Dispose();

        StorageIndex.Add(key);

        // Intentional fire-and-forget: Add() returns while the write proceeds in the
        // background. The original called .ConfigureAwait(false) on the un-awaited task,
        // which has no effect; a discard states the intent explicitly.
        // NOTE(review): failures inside SetupWrite are unobserved here — SetupWrite
        // removes the key itself on failure.
        _ = SetupWrite(key, fileNo, parameters);
    }
    catch (CustomException ex)
    {
        switch (ex.ExceptionKey)
        {
            case ExceptionKey.NullKey:
                throw new ArgumentNullException(nameof(key));
            case ExceptionKey.NullData:
                throw new ArgumentNullException(nameof(data));
            case ExceptionKey.NullParameters:
                throw new ArgumentNullException(nameof(parameters));
            case ExceptionKey.StreamLengthNotMatched:
            case ExceptionKey.DuplicateKey:
                // An element with the same key already exists or
                // the provided length does not match the data.
                throw new ArgumentException("An element with the same key already exists or the provided length does not match the data.");
            default:
                throw new Exception();
        }
    }
    catch (Exception)
    {
        throw;
    }
}
/// <summary>
/// Initializes the storage on first use: configures the index size limit and
/// recreates the backing storage file so nothing from a previous run survives.
/// </summary>
private void Init()
{
    StorageIndex.SetMaxIndexSize(_configuration.MaxIndexFile);

    // If the file has remained from a previous execution it must be deleted,
    // because its length may be larger than the current data.
    if (File.Exists(_storageFilePath))
    {
        File.Delete(_storageFilePath);
    }

    // File.Create returns an OPEN FileStream; the original leaked that handle,
    // which is the likely source of the intermittent "file is being used by
    // another process" errors. Dispose it immediately to release the handle.
    File.Create(_storageFilePath).Dispose();

    _memoryManager.Value.FlushMemory(); // best-effort cleanup to avoid lingering locks
}
/// <summary>
/// Reads the stored data for <paramref name="key"/>: serves it from the in-memory
/// cache when available, otherwise waits for any in-flight write to finish, reads
/// the bytes from the storage file, verifies the CRC, and decompresses if needed.
/// </summary>
/// <param name="key">Key of the entry to read.</param>
/// <param name="fileNo">Sequence number used only in diagnostic log messages.</param>
/// <returns>A stream over the stored data, or null when the entry was removed while waiting for its write to complete.</returns>
/// <exception cref="KeyNotFoundException">No entry exists for the key.</exception>
/// <exception cref="Exception">The stored bytes failed the CRC check (corruption) or an unknown error occurred.</exception>
private async Task<Stream> SetupReadAsync(string key, int fileNo)
{
    try
    {
        Utility.ConsoleWriteLine($"Start SetupReadAsync file: {fileNo}");
        if (!StorageIndex.Contains(key))
        {
            throw new CustomException(ExceptionKey.NoDataRelatedToKey);
        }

        byte[] byteArrayData = null;
        var (indexInformation, indexReference) = StorageIndex.Get(key);
        var convertor = new Convertor();

        if (indexInformation.Data != null)
        {
            // Served from the cached copy held on the index entry.
            byteArrayData = indexInformation.Data;
        }
        else
        {
            if (!indexInformation.IsStoringCompleted)
            {
                // The write for this entry is still in flight: wait until it either
                // completes or the entry is removed, whichever comes first.
                var cancellationTokenSource = new CancellationTokenSource();
                var taskWriteCompleted = IsWriteCompleted(indexInformation, cancellationTokenSource.Token);
                var taskIsRemoved = IsRemoved(indexInformation, cancellationTokenSource.Token);

                // FIX: the original attached a ContinueWith to Task.WhenAny and checked
                // IsFaulted on it. WhenAny's own task never faults, so exceptions from
                // the inner tasks were silently dropped. Await the finished task to
                // surface its exception with the original stack trace.
                var finished = await Task.WhenAny(taskWriteCompleted, taskIsRemoved);
                if (finished.IsFaulted)
                {
                    await finished;
                }

                cancellationTokenSource.Cancel(); // stop the loser of the race

                if (taskWriteCompleted.IsCanceled)
                {
                    // Item was removed, so there is nothing left to read.
                    return null;
                }

                // Re-read the entry: offset/size may not have been filled before the wait.
                (indexInformation, indexReference) = StorageIndex.Get(key);
            }

            int offset = convertor.ConvertByteArrayToInt(indexReference.Offset);
            int size = convertor.ConvertByteArrayToInt(indexReference.Size);

            Utility.ConsoleWriteLine($"Start ReadFileAsync file: {fileNo}");
            byteArrayData = await ReadFileAsync(offset, size);
            Utility.ConsoleWriteLine($"End ReadFileAsync file: {fileNo}");

            // Verify integrity before handing the bytes out or caching them.
            var crc = new Crc16().ComputeChecksumBytes(byteArrayData);
            if (!crc.IsEqual(indexInformation.CRC))
            {
                throw new CustomException(ExceptionKey.CorruptedData);
            }

            CheckCaching(indexInformation, byteArrayData);
        }

        // FIX: await the decompression instead of blocking on .Result — sync-over-async
        // inside an async method risks thread-pool starvation/deadlock and wraps
        // failures in AggregateException.
        var streamData = indexInformation.IsCompressed
            ? await convertor.DecompressAsync(byteArrayData)
            : convertor.ConvertByteArrayToStream(byteArrayData);

        Utility.ConsoleWriteLine($"End reading file: {fileNo} - Read Files Count: {++_readFilesCounter}");
        return streamData;
    }
    catch (CustomException ex)
    {
        switch (ex.ExceptionKey)
        {
            case ExceptionKey.NullKey:
                // key is null.
                throw new ArgumentNullException(nameof(key));
            case ExceptionKey.NoDataRelatedToKey:
                // key does not exist.
                throw new KeyNotFoundException();
            case ExceptionKey.CorruptedData:
            case ExceptionKey.Unknown:
            default:
                throw new Exception();
        }
    }
    catch (Exception)
    {
        throw;
    }
}
/// <summary>
/// Writes the file identified by <paramref name="key"/> into the storage: compresses
/// (when beneficial) and MD5-hashes the source in parallel, registers the content in
/// the index, then starts a fire-and-forget write of the bytes to the storage file.
/// On any failure the key is removed from the index and the pending write is cancelled.
/// NOTE(review): the key is opened as a FileStream path — it appears to double as the
/// source file path; confirm against the caller.
/// </summary>
/// <param name="key">Entry key, also used as the path of the source file to read.</param>
/// <param name="fileNo">Sequence number used only in diagnostic log messages.</param>
/// <param name="parameters">Expected hash and compression preference for the data.</param>
/// <exception cref="ArgumentException">The computed hash does not match <c>parameters.Hash</c>, or duplicate data.</exception>
/// <exception cref="IOException">Index or storage file size limits exceeded.</exception>
public async Task SetupWrite(string key, int fileNo, StreamInfo parameters)
{
    Utility.ConsoleWriteLine($"Start SetupWrite file: {fileNo}");
    var cancellationTokenSource = new CancellationTokenSource();
    try
    {
        var convertor = new Convertor();
        // Async FileStream (useAsync: true) over the source identified by `key`.
        var streamCompress = new FileStream(key, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true);
        int inputSize = (int)streamCompress.Length;
        bool isCompressionNeeded = IsCompressionNeeded(parameters.IsCompressed, inputSize);
        byte[] storageData = null;
        byte[] crc = null;
        int dataLength = 0;
        Utility.ConsoleWriteLine($"Start Compressing file: {fileNo}");
        // Compression (or plain byte-array conversion) runs concurrently with the MD5
        // task below; results are captured into the closed-over locals.
        // NOTE(review): throwing inside ContinueWith makes the continuation task faulted;
        // Task.WhenAll below will re-wrap it, so callers see an AggregateException chain.
        var taskCompress = (isCompressionNeeded
                ? convertor.CompressAsync(streamCompress)
                : convertor.ConvertStreamToByteArrayAsync(streamCompress))
            .ContinueWith(action =>
            {
                streamCompress.Dispose(); // always release the source handle
                if (action.IsFaulted)
                {
                    foreach (var ex in (action.Exception as AggregateException).InnerExceptions)
                    {
                        throw ex;
                    }
                }
                storageData = action.Result;
                Utility.ConsoleWriteLine($"End Compressing file: {fileNo}");
                crc = new Crc16().ComputeChecksumBytes(storageData);
                dataLength = storageData.Length;
            });
        byte[] hashedData = null;
        // A second, independent stream over the same file so hashing does not contend
        // with the compression read above.
        var streamMD5 = new FileStream(key, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true);
        Utility.ConsoleWriteLine($"Start Md5 file: {fileNo}");
        var taskMd5 = convertor.GetMd5HashAsync(streamMD5)
            .ContinueWith(action =>
            {
                streamMD5.Dispose(); // always release the source handle
                if (action.IsFaulted)
                {
                    foreach (var ex in (action.Exception as AggregateException).InnerExceptions)
                    {
                        throw ex;
                    }
                }
                hashedData = action.Result;
                Utility.ConsoleWriteLine($"End Md5 file: {fileNo}");
                // Reject the data if the caller supplied an expected hash and it differs.
                if (parameters.Hash != null && !parameters.Hash.IsEqual(hashedData))
                {
                    throw new CustomException(ExceptionKey.HashNotMatched);
                }
            });
        // Both continuations must finish before the index entry can be created.
        await Task.WhenAll(taskCompress, taskMd5);
        int offset = StorageIndex.SetContent(key, dataLength, hashedData, crc, isCompressionNeeded);
        Utility.ConsoleWriteLine($"Start WriteFileAsync file: {fileNo}");
        // Fire-and-forget: the physical write proceeds in the background and marks the
        // index entry complete when done.
        // NOTE(review): this task is never awaited, so an exception thrown inside the
        // continuation is unobserved — confirm this is acceptable.
        var _ = WriteFileAsync(storageData,
            offset, cancellationTokenSource.Token)
            .ContinueWith(action =>
            {
                if (action.IsFaulted)
                {
                    foreach (var ex in (action.Exception as AggregateException).InnerExceptions)
                    {
                        throw ex;
                    }
                }
                StorageIndex.CompleteStoring(key);
                Utility.ConsoleWriteLine($"End WriteFileAsync file: {fileNo} - Writen Files Count: {++_writtenFilesCounter}");
            })
            .ConfigureAwait(false); // deliberately not awaited
        // NOTE(review): this size check runs AFTER the background write has already been
        // started; the catch below cancels it via the token, but checking before
        // WriteFileAsync would avoid starting a doomed write — confirm intent.
        if (_configuration.MaxStorageFile > 0 && offset + dataLength > _configuration.MaxStorageFile) // the file would exceed the max size
        {
            throw new CustomException(ExceptionKey.StorageFileSizeExceeded);
        }
    }
    catch (CustomException ex)
    {
        // Roll back the index entry and cancel the pending background write.
        StorageIndex.Remove(key);
        cancellationTokenSource.Cancel();
        switch (ex.ExceptionKey)
        {
            case ExceptionKey.HashNotMatched:
            case ExceptionKey.DuplicateData:
                // An element with the same key already exists or the
                // provided hash or length does not match the data.
                throw new ArgumentException();
            case ExceptionKey.IndexSizeExceeded:
            case ExceptionKey.StorageFileSizeExceeded:
                // I/O limit exceeded while persisting data.
                throw new IOException();
            case ExceptionKey.Unknown:
            default:
                throw new Exception();
        }
    }
    catch (Exception ex)
    {
        // Same rollback as above for non-CustomException failures; rethrow preserving
        // the original stack trace.
        StorageIndex.Remove(key);
        cancellationTokenSource.Cancel();
        //throw new CustomException(ExceptionKey.Unknown);
        throw;
    }
    finally
    {
        _memoryManager.Value.OptimizeMemoryConsumption();
    }
}
/// <summary>
/// Determines whether an entry with the given key exists in the storage index.
/// </summary>
/// <param name="key">Key to look up.</param>
/// <returns>true when the key is present in the index; otherwise false.</returns>
public bool Contains(string key) => StorageIndex.Contains(key);