private void InitializeModules(ChunkedHandler chunkedHandler, FileInfo destination)
{
    _chunkedHandler = chunkedHandler ?? throw new ArgumentNullException(nameof(chunkedHandler));
    _writer = new ChunkedFileWriter(destination, _token.Token);

    _reader.ChunkRead += (chunk, isLastChunk) => chunkedHandler.AddChunkToQueue(chunk, isLastChunk);
    _reader.OnError += error => OnError($"While reading: {error.GetException().Message}");

    _chunkedHandler.ChunkHandled += (chunk, isLastChunk) =>
    {
        if (_token.IsCancellationRequested)
        {
            return;
        }

        // Chunks can finish handling out of order; block until it is this chunk's turn.
        // The CompareExchange(ref, 0, 0) call is an atomic read of _chunkIndex.
        while (Interlocked.CompareExchange(ref _chunkIndex, 0, 0) != chunk.Index)
        {
            _chunkWriteEvent.WaitOne();
        }

        _chunkWriteEvent.Reset();
        _writer.WriteChunk(chunk, isLastChunk);
        Interlocked.Increment(ref _chunkIndex);
        _chunkWriteEvent.Set();   // wake any handlers waiting for their turn

        if (isLastChunk)
        {
            _allBytesWritten.Set();
        }
    };
    _chunkedHandler.OnError += error => OnError($"While handling: {error.GetException().Message}");

    const int decimalPlaces = 2;
    _writer.ChunkWritten += (chunk, isLastChunk) =>
    {
        double percent = Math.Round((double)_reader.TotalBytesRead / _reader.TotalBytes * 100, decimalPlaces);
        PercentProgressChanged?.Invoke(percent);
    };
    _writer.OnError += error => OnError($"While writing: {error.GetException().Message}");
}
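The ordering gate inside ChunkHandled above is the subtle part: handler threads complete in any order, but writes must land in chunk-index order. Below is a minimal, self-contained sketch of that same gate in isolation, assuming a ManualResetEvent for the turn signal; the OrderedWriter and Submit names are illustrative, not part of the library.

using System;
using System.Threading;

class OrderedWriter
{
    private int _nextIndex;                                         // index of the next chunk allowed to write
    private readonly ManualResetEvent _turn = new ManualResetEvent(false);

    public void Submit(int chunkIndex, Action write)
    {
        // Re-check the index on every wake-up until it is this chunk's turn.
        while (Interlocked.CompareExchange(ref _nextIndex, 0, 0) != chunkIndex)
        {
            _turn.WaitOne();
        }
        _turn.Reset();                      // close the gate while we hold the turn
        write();                            // perform the ordered write
        Interlocked.Increment(ref _nextIndex);
        _turn.Set();                        // wake all waiters so the next index can proceed
    }
}

Because only the thread whose turn it is ever calls Reset, the Set stays latched until the rightful owner claims it, so no wake-up is lost even when waiters race the check.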
private void WriteRandomBytes(FileInfo fileInfo, int chunksCount)
{
    using (var writer = new ChunkedFileWriter(fileInfo, new System.Threading.CancellationToken()))
    {
        for (int i = 0; i < chunksCount; i++)
        {
            var chunk = GenerateChunk(i);
            writer.WriteChunk(chunk, i + 1 == chunksCount);
        }
    }
}
public void FileWrite()
{
    using (var writer = new ChunkedFileWriter(_writeFileInfo, new System.Threading.CancellationToken()))
    {
        writer.ChunkWritten += (chunk, isLastChunk) => _proccessedWriteBytes += chunk.Bytes.Length;
        writer.OnError += error => Assert.Fail(error.GetException().StackTrace);

        var generator = new FileGenerator(_maxChunkSize);
        for (int i = 0; i < _chunksCount; i++)
        {
            // Mark the final chunk so the writer can finish the file.
            writer.WriteChunk(generator.GenerateChunk(i), i + 1 == _chunksCount);
        }
    }

    // The bytes reported via ChunkWritten should equal the size of the file on disk.
    Assert.AreEqual(_proccessedWriteBytes, _writeFileInfo.Length);
}
public void FileServiceRestoreAsyncTest()
{
    SQLiteDatabase db = new SQLiteDatabase(".\\", nameof(FileServiceRestoreAsyncTest));
    FileService fmSvc = new FileService(new DaoRepository(db));
    fmSvc.ChunkLength = 111299;

    ConsoleLogger logger = new ConsoleLogger();
    logger.AddDetails = false;
    logger.StartLoggingThread();

    FileInfo testDataFile = new FileInfo("C:\\Bam\\Data\\Test\\TestDataFile.dll");
    ChunkedFileDescriptor chunkedFile = fmSvc.StoreFileChunks(testDataFile);

    FileInfo writeTo = new FileInfo($".\\{nameof(FileServiceRestoreAsyncTest)}_restored.dat");
    DateTime start = DateTime.UtcNow;
    ChunkedFileWriter writer = ChunkedFileWriter.FromFileHash(fmSvc, chunkedFile.FileHash, logger);
    writer.Write(writeTo.FullName).Wait();
    TimeSpan took = DateTime.UtcNow.Subtract(start);

    FileInfo written = new FileInfo(writeTo.FullName);
    OutLine(took.ToString(), ConsoleColor.Cyan);
    Expect.IsTrue(written.Exists);
    Expect.AreEqual(testDataFile.Md5(), written.Md5(), "file content didn't match");
}
public ChunkedFileWriter GetFileWriter(string fileHash)
{
    return ChunkedFileWriter.FromFileHash(this, fileHash, Logger);
}
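GetFileWriter is a convenience wrapper over the same ChunkedFileWriter.FromFileHash call used in the restore test above, so a caller would use it roughly as follows. This is an illustrative sketch only: fmSvc and chunkedFile are assumed to be a FileService and the descriptor returned by StoreFileChunks, as in that test, and the output path is made up.

// Hypothetical usage, mirroring FileServiceRestoreAsyncTest:
ChunkedFileWriter writer = fmSvc.GetFileWriter(chunkedFile.FileHash);
writer.Write(".\\restored_copy.dat").Wait();   // Write(...) returns a Task; blocked here test-style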
public void Setup()
{
    _compressedFile = new FileInfo("makkonnell_c_sovershennyi_kod_master_klass.pdf.gz");
    _decompressedFile = new FileInfo(Guid.NewGuid().ToString());
    _decompressorWriter = new ChunkedFileWriter(_decompressedFile, new System.Threading.CancellationToken());
}
public void Setup()
{
    _sourceFile = new FileInfo("makkonnell_c_sovershennyi_kod_master_klass.pdf");
    _compressedFile = new FileInfo(Guid.NewGuid().ToString());
    _compressorWriter = new ChunkedFileWriter(_compressedFile, new CancellationToken());
}