/// <summary>
/// Converts the PoC1 plot into a PoC2 plot written to a separate output file,
/// leaving the input plot file untouched.
/// </summary>
/// <param name="outputFile">Target file for the converted plot. Must not already exist
/// (<see cref="FileMode.CreateNew"/> throws if it does).</param>
/// <param name="checkpoint">Resume position of a previously interrupted conversion.</param>
public async Task RunOutline(FileInfo outputFile, PlotConverterCheckpoint checkpoint)
{
    var blockSize = m_InputPlotFile.Nonces * Constants.SCOOP_SIZE;
    m_InputPlotFile.Validate();

    // using-statements guarantee both handles are disposed even when the shuffle (or
    // SetLength) throws — the original code leaked both streams on any exception and
    // kept the FileShare.None lock on the freshly created output file.
    // Creation order is preserved: the output file is created before the input is opened.
    using (var outputStream = new FileStream(outputFile.FullName, FileMode.CreateNew, FileAccess.Write, FileShare.None, 4096, true))
    using (var inputStream = new FileStream(m_InputFile.FullName, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, true))
    {
        // Pre-allocate the full target size up front.
        outputStream.SetLength(m_InputPlotFile.RealPlotSize);
        await ShufflePoc1To2(inputStream, outputStream, blockSize, m_InputPlotFile.Nonces, m_Partitions, checkpoint);
    }
}
/// <summary>
/// Converts the PoC1 plot to PoC2 in place, shuffling scoop data inside the existing
/// file through a single read/write handle. On successful completion the file is
/// renamed to its PoC2 file name.
/// </summary>
/// <param name="checkpoint">Resume position of a previously interrupted conversion.</param>
public async Task RunInline(PlotConverterCheckpoint checkpoint)
{
    var blockSize = m_InputPlotFile.Nonces * Constants.SCOOP_SIZE;
    m_InputPlotFile.Validate();

    bool done;
    // using guarantees the exclusive handle is released even when the shuffle throws —
    // the original code leaked it (and its FileShare.None lock) on any exception.
    // FileAccess.ReadWrite is the named equivalent of FileAccess.Read | FileAccess.Write.
    using (var handle = new FileStream(m_InputFile.FullName, FileMode.Open, FileAccess.ReadWrite, FileShare.None, 4096, true))
    {
        // Same stream serves as source and destination for the in-place conversion.
        done = await ShufflePoc1To2(handle, handle, blockSize, m_InputPlotFile.Nonces, m_Partitions, checkpoint);
    }

    // Rename only after the handle is closed and only when the conversion was not cancelled.
    if (done)
    {
        m_InputPlotFile.Rename(m_InputPlotFile.Poc2FileName);
    }
}
/// <summary>
/// Core PoC1 -> PoC2 shuffle loop. For each batch of scoop blocks it reads two mirrored
/// buffers (presumably scoop i and its counterpart — the pairing lives in the Read()
/// helper, which is not visible here), swaps the second 32-byte hash of every nonce's
/// scoop data (offset 32, SHABAL256_HASH_SIZE bytes) between the buffers, and writes
/// both back. Supports checkpoint-based resume, retry on IOException, cancellation,
/// and periodic progress reporting via the Progress observable.
/// </summary>
/// <param name="sourceStream">Stream to read scoop blocks from (may be the same stream as the destination for in-place conversion).</param>
/// <param name="destinationStream">Stream to write shuffled blocks to.</param>
/// <param name="blockSize">Bytes per scoop across all nonces (nonces * SCOOP_SIZE).</param>
/// <param name="numnonces">Number of nonces in the plot.</param>
/// <param name="partitions">Scoop blocks processed per read/write batch; must divide the plot evenly.</param>
/// <param name="checkpoint">Resume position; Position must be a multiple of blockSize * partitions.</param>
/// <returns>true when the conversion ran to completion; false when it stopped early on cancellation.</returns>
/// <exception cref="PlotConverterException">Checkpoint misaligned for the partition count, or the retry budget for a read/write was exhausted.</exception>
async Task <bool> ShufflePoc1To2(FileStream sourceStream, FileStream destinationStream, long blockSize, long numnonces, int partitions, PlotConverterCheckpoint checkpoint)
{
    // One loop iteration per nonce per scoop *pair* — only half the scoops are visited
    // because each visit swaps data for both members of the mirrored pair.
    var totalIterations = Constants.SCOOPS_IN_NONCE / 2 * numnonces;
    var iterationPosition = 0L;
    var stopwatch = Stopwatch.StartNew();

    // Periodic progress ticker. Reads a snapshot of iterationPosition and extrapolates
    // the remaining time linearly from the elapsed time per iteration so far.
    var timer = Observable.Timer(TimeSpan.Zero, m_ProgressIntervall);
    var timerSubscription = timer.Subscribe(ticks =>
    {
        var position = iterationPosition;
        var elapsedTime = stopwatch.Elapsed;
        var iterationsRemaining = totalIterations - position;
        var percentage = (double)position / totalIterations * 100.0;
        // position == 0 would divide by zero in the extrapolation, so report MaxValue
        // ("unknown") until at least one iteration completed; zero remaining -> Zero.
        var remainingTime = iterationsRemaining == 0
            ? TimeSpan.Zero
            : (position == 0
                ? TimeSpan.MaxValue
                : TimeSpan.FromTicks(Convert.ToInt64((double)elapsedTime.Ticks / position * iterationsRemaining)));
        Progress.OnNext(new ProgressEventArgs(elapsedTime, remainingTime, percentage, IsPausedUnsafe()));
    });

    // A batch covers `partitions` consecutive scoop blocks; a checkpoint taken with a
    // different partition count may not fall on a batch boundary and cannot be resumed.
    // NOTE(review): timerSubscription is not disposed (and the stopwatch not stopped) on
    // this throw, nor when Retry below ultimately throws — consider a try/finally.
    var adjustedBlockSize = blockSize * partitions;
    if (checkpoint.Position % adjustedBlockSize != 0)
    {
        throw new PlotConverterException($"Can not resume plot conversion with checkpoint {checkpoint.Position} and {partitions} partitions.");
    }
    var resumeScoopIndex = checkpoint.Position / adjustedBlockSize;

    var buffer1 = new byte[adjustedBlockSize];
    var buffer2 = new byte[adjustedBlockSize];
    for (var scoopIndex = resumeScoopIndex; scoopIndex < Constants.SCOOPS_IN_NONCE / 2 / partitions; scoopIndex++)
    {
        var pos = scoopIndex * adjustedBlockSize;

        // Read both mirrored buffers, retrying on IOException. On final failure the
        // checkpoint is saved at the current (unprocessed) batch so a restart resumes here.
        await Retry <IOException>(
            async() => await Read(sourceStream, buffer1, buffer2, pos, adjustedBlockSize).ConfigureAwait(false),
            (error, retry) => m_Log.Warn(error, $"Error during read operation. Starting retry no {retry} in {m_TimeBetweenNextRetry.ToReadableString()}."),
            e =>
            {
                Checkpoint = new PlotConverterCheckpoint(pos);
                return(new PlotConverterException($"Max retry of {m_RetryCount} during read operation exceeded. 
Details: {e}."));
            }).ConfigureAwait(false);

        if (partitions == 1)
        {
            // Serial path: for each nonce, swap the 32-byte hash at offset 32 between
            // buffer1 and buffer2 via a temporary copy.
            var hash1 = new byte[Constants.SHABAL256_HASH_SIZE];
            var off = 32;
            for (var nonceIndex = 0; nonceIndex < numnonces; nonceIndex++)
            {
                iterationPosition++;
                Buffer.BlockCopy(buffer1, off, hash1, 0, Constants.SHABAL256_HASH_SIZE);
                Buffer.BlockCopy(buffer2, off, buffer1, off, Constants.SHABAL256_HASH_SIZE);
                Buffer.BlockCopy(hash1, 0, buffer2, off, Constants.SHABAL256_HASH_SIZE);
                off += Constants.SCOOP_SIZE;
            }
        }
        else
        {
            // Parallel path: each partition shuffles its own slice. buffer2 is indexed
            // with the mirrored partition (partitions - partitionIndex - 1), matching the
            // mirrored scoop layout within the batch.
            // NOTE(review): the offsets use (int)numnonces in int arithmetic — could
            // overflow int for very large plots; verify against expected plot sizes.
            Parallel.For(0, partitions, partitionIndex =>
            {
                var hash1 = new byte[Constants.SHABAL256_HASH_SIZE];
                for (var nonceIndex = 0; nonceIndex < numnonces; nonceIndex++)
                {
                    var off = nonceIndex * Constants.SCOOP_SIZE + 32;
                    Buffer.BlockCopy(buffer1, partitionIndex * (int)numnonces * Constants.SCOOP_SIZE + off, hash1, 0, Constants.SHABAL256_HASH_SIZE);
                    Buffer.BlockCopy(buffer2, (partitions - partitionIndex - 1) * (int)numnonces * Constants.SCOOP_SIZE + off, buffer1, partitionIndex * (int)numnonces * Constants.SCOOP_SIZE + off, Constants.SHABAL256_HASH_SIZE);
                    Buffer.BlockCopy(hash1, 0, buffer2, (partitions - partitionIndex - 1) * (int)numnonces * Constants.SCOOP_SIZE + off, Constants.SHABAL256_HASH_SIZE);
                }
                // Progress counter is shared across partitions, hence the interlocked add.
                Interlocked.Add(ref iterationPosition, numnonces);
            });
        }

        // Cancellation is honored between shuffle and write: the shuffled buffers are
        // discarded and the checkpoint points at the current batch so it is redone on resume.
        if (m_CancellationTokenSource.IsCancellationRequested)
        {
            Checkpoint = new PlotConverterCheckpoint(pos);
            break;
        }

        // Write both buffers back, retrying on IOException. On final failure the
        // checkpoint is cleared — presumably because a partial write leaves the
        // destination in an unknown state; confirm intended resume semantics.
        await Retry <IOException>(
            async() => await Write(destinationStream, buffer1, buffer2, pos, adjustedBlockSize).ConfigureAwait(false),
            (error, retry) => m_Log.Warn(error, $"Error during write operation. Starting retry no {retry} in {m_TimeBetweenNextRetry.ToReadableString()}."),
            e =>
            {
                Checkpoint = null;
                return(new PlotConverterException(
                    $"Max retry of {m_RetryCount} during write operation exceeded. 
Details: {e}."));
            }).ConfigureAwait(false);
    }

    stopwatch.Stop();
    timerSubscription.Dispose();
    // Emit a final 100% progress event only on a clean (non-cancelled) run.
    if (!m_CancellationTokenSource.IsCancellationRequested)
    {
        Progress.OnNext(new ProgressEventArgs(stopwatch.Elapsed, TimeSpan.Zero, 100, false));
    }
    return(!m_CancellationTokenSource.IsCancellationRequested);
}