public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Push the last chunk of the piece into the incremental hash and compare
    // the digest with the expected piece hash taken from the metainfo.
    algorithm.Push(read.Buffer.Data, read.Buffer.Offset, Math.Min(read.Buffer.Count, read.Count));

    Metainfo metainfo = context.Metainfo;
    byte[] expected = metainfo.Pieces[read.Piece].ToBytes();
    byte[] hash = algorithm.Complete();

    bool result = Bytes.Equals(hash, expected);
    bitfield[read.Piece] = result;

    algorithm.Dispose();

    // Move to the next piece within the requested scope; if its data exists,
    // start verifying it, otherwise persist the bitfield and report completion.
    int next = Next(scope, read.Piece + 1);
    bool exists = context.View.Exists(next, 0);

    if (exists)
    {
        context.Queue.Add(new Start(bitfield, scope, next, block));
    }
    else
    {
        block.Release();
        onCompleted.Invoke(this);

        context.Bitfile.Write(bitfield);
        context.Hooks.CallDataVerified(metainfo.Hash, bitfield);
    }
}
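The verification completions above and below accumulate piece data chunk by chunk and compare the resulting digest with the 20-byte piece hash from the metainfo. The following standalone sketch shows the same push/complete/compare pattern using the BCL IncrementalHash type; it is not the repository's own hashing wrapper, and all names in it are illustrative.

using System;
using System.Linq;
using System.Security.Cryptography;

public static class PieceHashSketch
{
    // Hypothetical helper: hashes a piece delivered as several blocks and
    // compares the digest with the expected SHA-1 taken from the metainfo.
    public static bool VerifyPiece(byte[][] blocks, byte[] expected)
    {
        using (IncrementalHash sha1 = IncrementalHash.CreateHash(HashAlgorithmName.SHA1))
        {
            foreach (byte[] block in blocks)
            {
                // Corresponds to algorithm.Push(buffer, offset, count) in the tasks above.
                sha1.AppendData(block, 0, block.Length);
            }

            // Corresponds to algorithm.Complete() followed by Bytes.Equals(hash, expected).
            byte[] hash = sha1.GetHashAndReset();
            return hash.SequenceEqual(expected);
        }
    }
}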
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    long position = 0;
    int pieces = context.Metainfo.Properties.Pieces;
    int pieceSize = context.Metainfo.Properties.PieceSize;
    int blockSize = context.Metainfo.Properties.BlockSize;

    RepositoryAllocation allocation = new RepositoryAllocation(pieces);
    RepositoryViewAllocator allocator = new RepositoryViewAllocator(context.Dependencies.Files);

    // Walk the entries in metainfo order; for each file that does not exist yet,
    // create its directory and record the range of pieces it overlaps.
    foreach (MetainfoEntry entry in context.Metainfo.Entries)
    {
        string path = entry.GetPath(context.Parameters.Destination);
        FileInfo info = new FileInfo(path);

        if (info.Exists == false)
        {
            EnsureDirectoryExists(path);
            allocation.Add(entry, new RepositoryAllocationRange(
                (int)(position / pieceSize),
                (int)((position + entry.Size) / pieceSize)));
        }

        position += entry.Size;
    }

    // Allocate the backing files and expose them through a repository view.
    MetainfoEntry[] entries = context.Metainfo.Entries;
    RepositoryViewCache cache = allocator.Allocate(context.Parameters.Destination, entries, pieceSize, blockSize);

    context.View = new RepositoryView(cache);
    context.Hooks.CallDataAllocated(context.Metainfo.Hash, context.Parameters.Destination);
}
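The allocation task above maps each missing file onto the pieces it overlaps by dividing the file's start and end offsets by the piece size. A minimal sketch of that arithmetic, mirroring the division used above; the helper and its name are illustrative, not part of the repository's API.

public static class AllocationSketch
{
    // Hypothetical helper: returns the index of the piece containing the first
    // byte of a file that starts at 'position', and the piece index obtained by
    // dividing its end offset by the piece size (as in the allocation above).
    public static (int First, int Last) PieceRange(long position, long size, int pieceSize)
    {
        int first = (int)(position / pieceSize);
        int last = (int)((position + size) / pieceSize);

        return (first, last);
    }
}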
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    onCompleted.Invoke(this);

    // Wrap the file buffer in a data block that hands its memory back through
    // the repository's memory dependency, then notify listeners about the read.
    Action<byte[]> release = context.Dependencies.Memory.Release;
    DataBlock data = new FileBufferDataBlock(read, release);

    context.Hooks.CallBlockRead(context.Parameters.Hash, index, data);
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Rent a buffer large enough for one block and read it from the view;
    // the result is handed back to the queue as a completion task.
    int blockSize = context.Metainfo.Properties.BlockSize;
    RepositoryMemoryBlock block = context.Dependencies.Memory.Allocate(blockSize);
    FileBuffer buffer = new FileBuffer(block.Data, 0, blockSize);

    context.View.Read(buffer, index.Piece.Index, index.Offset / blockSize, result =>
    {
        context.Queue.Add(new Complete(index, result));
    });
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Access the incoming data and write it to the view at the block position
    // derived from the block offset; completion is queued once the write returns.
    data.With((buffer, offset, count) =>
    {
        int blockSize = context.Metainfo.Properties.BlockSize;
        FileBuffer file = new FileBuffer(buffer, offset, count);

        context.View.Write(file, index.Piece.Index, index.Offset / blockSize, args =>
        {
            context.Queue.Add(new Complete(index, data));
        });
    });
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Feed the chunk that was just read into the incremental hash.
    algorithm.Push(read.Buffer.Data, read.Buffer.Offset, Math.Min(read.Buffer.Count, read.Count));

    // Advance by the number of blocks the buffer holds and read the next chunk;
    // keep hashing while more data exists, otherwise finish the piece.
    int step = block.Length / context.Metainfo.Properties.BlockSize;

    context.View.Read(block.Data, read.Piece, read.Block + step, args =>
    {
        if (args.Count > 0 && context.View.Exists(args.Piece, args.Block + step))
        {
            context.Queue.Add(new Continue(bitfield, scope, algorithm, args, block));
        }
        else
        {
            context.Queue.Add(new Complete(bitfield, scope, algorithm, args, block));
        }
    });
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Push the final chunk, compute the digest, compare it with the expected
    // piece hash and accept or reject the piece accordingly.
    algorithm.Push(read.Buffer.Data, read.Buffer.Offset, Math.Min(read.Buffer.Count, read.Count));

    Metainfo metainfo = context.Metainfo;
    byte[] expected = metainfo.Pieces[piece.Index].ToBytes();
    byte[] hash = algorithm.Complete();
    bool result = Bytes.Equals(hash, expected);

    AcceptIfRequired(context, result);
    RejectIfRequired(context, result);

    algorithm.Dispose();
    block.Release();

    onCompleted.Invoke(this);
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Read as many blocks as fit into the buffer, but never more than a piece
    // holds, starting at the beginning of the piece to be verified.
    int blocksInBuffer = block.Length / context.Metainfo.Properties.BlockSize;
    int blocksInPiece = context.Metainfo.Properties.PieceSize / context.Metainfo.Properties.BlockSize;
    int step = Math.Min(blocksInBuffer, blocksInPiece);

    FileBuffer buffer = new FileBuffer(block.Data, 0, step * context.Metainfo.Properties.BlockSize);

    context.View.Read(buffer, piece, 0, args =>
    {
        if (args.Count > 0 && context.View.Exists(args.Piece, args.Block + step))
        {
            context.Queue.Add(new Continue(bitfield, scope, algorithm, args, block));
        }
        else
        {
            context.Queue.Add(new Complete(bitfield, scope, algorithm, args, block));
        }
    });
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Verify a single piece: create a hash, rent a buffer, read the first chunk
    // and either continue hashing or complete immediately when no more data exists.
    HashAlgorithm algorithm = SHA1.Create();

    int bufferSize = context.Configuration.BufferSize;
    RepositoryMemoryBlock block = context.Dependencies.Memory.Allocate(bufferSize);
    int step = block.Length / context.Metainfo.Properties.BlockSize;

    context.View.Read(block.Data, piece.Index, 0, args =>
    {
        if (args.Count > 0 && context.View.Exists(piece.Index, args.Block + step))
        {
            context.Queue.Add(new Continue(piece, args, algorithm, block));
        }
        else
        {
            context.Queue.Add(new Complete(piece, args, algorithm, block));
        }
    });
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Load the persisted bitfield (or start from an empty one), narrow it to the
    // requested scope and find the first piece that still needs verification.
    int length = context.Metainfo.Pieces.Length;
    Bitfield bitfield = context.Bitfile.Read();

    bitfield = bitfield ?? new Bitfield(length);

    Bitfield reduced = ReduceScope(bitfield);
    int next = Next(reduced, 0);

    if (next < length)
    {
        int bufferSize = context.Configuration.BufferSize;
        RepositoryMemoryBlock block = context.Dependencies.Memory.Allocate(bufferSize);

        context.Queue.Add(new Start(bitfield, reduced, next, block));
    }
    else
    {
        // Nothing left to verify; report the bitfield as it stands.
        onCompleted.Invoke(this);
        context.Hooks.CallDataVerified(context.Metainfo.Hash, bitfield);
    }
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // The block has been written; release its memory and notify listeners.
    data.Release();
    onCompleted.Invoke(this);

    context.Hooks.CallBlockWritten(context.Metainfo.Hash, index);
}
public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    // Flush any pending writes in the underlying view.
    context.View.Flush();
}