private const long BlockSize = 1048576; // 1 MiB per block

/// <summary>
/// Entry point: parses the command line, wires up the reader -> compressor pool -> writer
/// pipeline and runs it to completion.
/// Usage: (compress|decompress) sourceFile destinationFile [compressorsCount]
/// </summary>
static void Main(string[] args)
{
    var errMessage = CheckArguments(args);
    if (errMessage != null)
    {
        Console.WriteLine(errMessage);
        return;
    }

    #region Command line arguments
    var mode = args[0];
    var sourceFile = args[1];
    var destinationFile = args[2];

    // Default: one compressor per logical CPU.
    ushort compressorsCount = (ushort)Environment.ProcessorCount;
    if (args.Length == 4)
    {
        // BUG FIX: TryParse zeroes its out-parameter on failure, so the old code
        // silently replaced the ProcessorCount default with 0 when args[3] was
        // not a valid ushort. Parse into a temp and accept only non-zero values.
        if (ushort.TryParse(args[3], out var requestedCount) && requestedCount > 0)
        {
            compressorsCount = requestedCount;
        }
    }
    #endregion

    var reader = new Reader(BlockSize);
    var writer = new Writer(BlockSize);

    // Bounded queues provide back-pressure: the reader stalls when the compressors
    // or the writer fall behind, capping in-flight memory at ~10 blocks per queue.
    var poolInput = new BlockingCollection<Job>(10);
    var writerInput = new BlockingCollection<Job>(10);
    var compressorPool = new CompressorPool(compressorsCount, poolInput, writerInput);

    var writerThread = new Thread(() => writer.Start(destinationFile, writerInput, compressorPool, reader));
    var compressorPoolThread = new Thread(() => compressorPool.Start());
    // Background threads so a fatal error on the main thread cannot leave the process hanging.
    writerThread.IsBackground = true;
    compressorPoolThread.IsBackground = true;
    writerThread.Start();
    compressorPoolThread.Start();

    if (mode == "compress")
    {
        reader.ProcessOriginalFile(sourceFile, writerInput, poolInput, writer, compressorPool);
    }
    else // "decompress" — CheckArguments is assumed to have validated the mode string
    {
        reader.ProcessCompressedFile(sourceFile, writerInput, poolInput, writer, compressorPool);
    }

    writerThread.Join();
    compressorPoolThread.Join();
}
/// <summary>
/// Writer loop: drains finished <c>Job</c>s from <paramref name="jobs"/> and writes them
/// to <paramref name="fileName"/> until the whole original file length has been accounted
/// for, or a stop is requested via <c>_stop</c>.
/// </summary>
/// <param name="fileName">Destination file; any existing file is deleted first.</param>
/// <param name="jobs">Queue of completed jobs produced upstream.</param>
/// <param name="compressors">Pool to stop (with success flag) when writing ends.</param>
/// <param name="reader">Reader to stop if the destination file is locked.</param>
public void Start(string fileName, BlockingCollection<Job> jobs, CompressorPool compressors, Reader reader)
{
    var fileInfo = new FileInfo(fileName);
    if (IsFileLocked(fileInfo))
    {
        Console.WriteLine($"Error: file {fileName} is locked!\n");
        compressors.Stop();
        reader.Stop();
        return;
    }
    // Start from a clean file: remove any previous output before writing.
    fileInfo.Delete();
    using (var stream = fileInfo.OpenWrite())
    {
        // Sentinel keeps the loop alive until the real length arrives via a
        // Header (compress path) or FileLength (decompress path) job.
        long origFileLength = long.MaxValue;
        while (origFileLength > 0)
        {
            Job job = null;
            // Busy-wait until a job is available or a stop is requested.
            SpinWait.SpinUntil(() => _stop || jobs.TryTake(out job));
            if (_stop)
            {
                break;
            }
            switch (job.WorkType)
            {
                case WorkType.Header:
                    // Write the header carrying the whole file's pre-compression size.
                    Write(stream, job.Data);
                    origFileLength = BitConverter.ToInt64(job.Data, 0);
                    break;
                case WorkType.FileLength:
                    // Length-only notification (no bytes written to the output).
                    origFileLength = BitConverter.ToInt64(job.Data, 0);
                    break;
                case WorkType.Compress:
                    // Write the block id
                    var id = BitConverter.GetBytes(job.BlockId);
                    Write(stream, id);
                    // Write the block's size before compression
                    var origBlockSize = BitConverter.GetBytes(job.OriginalBlockSize);
                    Write(stream, origBlockSize);
                    // Write the compressed block's size
                    var compressedBlockSize = BitConverter.GetBytes(job.Data.Length);
                    Write(stream, compressedBlockSize);
                    // Write the block itself
                    Write(stream, job.Data);
                    break;
                case WorkType.Decompress:
                    // Blocks may complete out of order; seek to this block's fixed slot.
                    // NOTE(review): assumes every block except possibly the last is exactly
                    // _blockSize bytes long — confirm against the compress path.
                    stream.Position = job.BlockId * _blockSize;
                    Write(stream, job.Data);
                    break;
                default:
                    throw new Exception("Unknown work type in writer");
            }
            // Count down by the original (uncompressed) bytes this job covered;
            // reaching exactly zero means the whole file has been processed.
            origFileLength -= job.OriginalBlockSize;
            Console.WriteLine(job);
        }
        ;
        // Success only if every original byte was accounted for.
        compressors.Stop(success: origFileLength == 0);
    }
}
/// <summary>
/// Parses a compressed file and feeds Decompress jobs to the compressor pool.
/// Expected layout: [long origFileLength] then, per block:
/// [long blockId][long origBlockSize][int compressedSize][compressedSize bytes].
/// Stops the writer and the pool on any format error.
/// </summary>
public void ProcessCompressedFile(string fileName, BlockingCollection<Job> writerInput, BlockingCollection<Job> compressorsInput, Writer writer, CompressorPool compressors)
{
    using (var streamSource = File.OpenRead(fileName))
    {
        // Header: the original (uncompressed) file length.
        var size = new byte[sizeof(long)];
        long origFileLength = ReadWhole(streamSource, size) ? BitConverter.ToInt64(size, 0) : -1;
        if (origFileLength > FileMaxSize || origFileLength < 0)
        {
            Console.WriteLine("Error: wrong compressed file format.");
            writer.Stop();
            compressors.Stop();
            return;
        }

        // Tell the writer the final size so it knows when it is done.
        var fileLengthJob = new Job(WorkType.FileLength, size);
        SpinWait.SpinUntil(() => _stop || writerInput.TryAdd(fileLengthJob));
        if (_stop)
        {
            return;
        }

        // Sanity bound for sizes read from the (untrusted) file.
        long workingSet = Process.GetCurrentProcess().WorkingSet64;
        while (origFileLength > 0)
        {
            // Block id. BUG FIX: this used BitConverter.ToInt16, reading only 2 of
            // the 8 bytes the writer emits with BitConverter.GetBytes(long) and
            // corrupting any id beyond short.MaxValue.
            var id = new byte[sizeof(long)];
            if (!ReadWhole(streamSource, id))
            {
                Console.WriteLine("Compressed file reading error: unexpected end of file in block header.");
                compressors.Stop();
                writer.Stop();
                break;
            }
            long blockId = BitConverter.ToInt64(id, 0);

            // Original (pre-compression) size of this block.
            size = new byte[sizeof(long)];
            if (!ReadWhole(streamSource, size))
            {
                Console.WriteLine("Compressed file reading error: unexpected end of file in block header.");
                compressors.Stop();
                writer.Stop();
                break;
            }
            long origBlockSize = BitConverter.ToInt64(size, 0);
            // Non-positive sizes would stall the countdown loop forever.
            if (origBlockSize <= 0 || origBlockSize > workingSet || origBlockSize > origFileLength)
            {
                Console.WriteLine($"Compressed file reading error. Original block size is larger than available memory or original block size is larger than original file length");
                Console.WriteLine($"Original block size: {origBlockSize}, workingSet: {workingSet}, block id: {blockId}, original file length: {origFileLength}");
                compressors.Stop();
                writer.Stop();
                break;
            }

            // Compressed (on-disk) size of this block.
            size = new byte[sizeof(int)];
            if (!ReadWhole(streamSource, size))
            {
                Console.WriteLine("Compressed file reading error: unexpected end of file in block header.");
                compressors.Stop();
                writer.Stop();
                break;
            }
            int compressedBlockSize = BitConverter.ToInt32(size, 0);
            // Negative would throw on array allocation below; guard explicitly.
            if (compressedBlockSize < 0 || compressedBlockSize > workingSet)
            {
                Console.WriteLine($"Compressed file reading error. CompressedBlockSize ({compressedBlockSize}) > workingSet ({workingSet})");
                compressors.Stop();
                writer.Stop();
                break;
            }

            var compressedBlock = new byte[compressedBlockSize];
            if (!ReadWhole(streamSource, compressedBlock))
            {
                Console.WriteLine("Compressed file reading error: unexpected end of file in block data.");
                compressors.Stop();
                writer.Stop();
                break;
            }

            var job = new Job(WorkType.Decompress, compressedBlock, blockId, origBlockSize);
            SpinWait.SpinUntil(() => _stop || compressorsInput.TryAdd(job));
            if (_stop)
            {
                break;
            }
            origFileLength -= origBlockSize;
        }
    }
}

// Fills the whole buffer, looping because Stream.Read may legally return fewer
// bytes than requested. BUG FIX: every Read return value used to be ignored,
// so a short read silently desynchronized the block framing.
// Returns false on premature end of stream.
private static bool ReadWhole(Stream stream, byte[] buffer)
{
    int offset = 0;
    while (offset < buffer.Length)
    {
        int read = stream.Read(buffer, offset, buffer.Length - offset);
        if (read == 0)
        {
            return false;
        }
        offset += read;
    }
    return true;
}
/// <summary>
/// Splits the source file into blocks of up to <c>_blockSize</c> bytes and feeds
/// Compress jobs to the compressor pool. A Header job carrying the total file
/// length is sent to the writer first so it knows when the output is complete.
/// </summary>
public void ProcessOriginalFile(string fileName, BlockingCollection<Job> writerInput, BlockingCollection<Job> compressorsInput, Writer writer, CompressorPool compressors)
{
    using (var streamSource = File.OpenRead(fileName))
    {
        long fileLength = streamSource.Length;
        if (fileLength > FileMaxSize)
        {
            Console.WriteLine("File is too big to be compressed by this program. Max size is 32GB.");
            writer.Stop();
            compressors.Stop();
            return;
        }

        // Header: the original file size, written first to the output.
        byte[] size = BitConverter.GetBytes(fileLength);
        var headerJob = new Job(WorkType.Header, size);
        SpinWait.SpinUntil(() => _stop || writerInput.TryAdd(headerJob));
        if (_stop)
        {
            return;
        }

        // First block may already be the (short) last one for small files.
        var blockSize = fileLength < _blockSize ? fileLength : _blockSize;
        long blockId = 0;
        while (fileLength > 0)
        {
            byte[] data = new byte[blockSize];
            // BUG FIX: Stream.Read may return fewer bytes than requested and the
            // old code ignored its return value, which could queue a partially
            // filled (zero-padded) block. Loop until the buffer is full.
            int offset = 0;
            while (offset < data.Length)
            {
                int read = streamSource.Read(data, offset, data.Length - offset);
                if (read == 0)
                {
                    break; // unexpected EOF; shouldn't happen since blockSize tracks the file length
                }
                offset += read;
            }

            var compressJob = new Job(WorkType.Compress, data, blockId, blockSize);
            SpinWait.SpinUntil(() => _stop || compressorsInput.TryAdd(compressJob));
            if (_stop)
            {
                break;
            }
            fileLength -= blockSize;
            // The final block may be shorter than _blockSize.
            if (fileLength <= blockSize)
            {
                blockSize = fileLength;
            }
            blockId++;
        }
    }
}