/// <summary>
/// Streams the file identified by <paramref name="HiggsFileID"/> into <paramref name="stream"/>,
/// piece by piece, serving each piece from the local cache when available and fetching it from
/// the remote store otherwise.
/// </summary>
/// <param name="HiggsFileID">64-character file identifier whose piece hashes are looked up via <c>GetPieceHashes</c>.</param>
/// <param name="transferProgress">Invoked after each piece with the cumulative number of bytes written so far.</param>
/// <param name="stream">Destination stream; pieces are written to it sequentially in order.</param>
public void GetFile(string HiggsFileID, Action<long> transferProgress, Stream stream)
{
    Debug.Assert(HiggsFileID.Length == 64);
    var hashes = GetPieceHashes(HiggsFileID);
    long progress = 0;

    // One client for all remote fetches; previously a new client was allocated per uncached piece.
    var higgsClient = new HiggsClient(EndpointUrl, _pieceCheckerUrl, _storageZoneId);

    //TODO: at some point parallelise this
    for (int i = 0; i < hashes.Length; i++)
    {
        if (_pieceCache.ContainsPiece(hashes[i]))
        {
            var data = _pieceCache.GetPieceFromCache(hashes[i]);
            progress += data.Length;
            data.CopyTo(stream);
            transferProgress(progress);
        }
        else
        {
            // Dispose the remote stream once the piece is copied (it was leaked before).
            using (var data = higgsClient.GetStream(hashes[i]))
            using (var myStream = new MemoryStream())
            {
                // Buffer the piece first so its exact length is known for progress reporting.
                data.CopyTo(myStream);
                byte[] streamBytes = myStream.ToArray();
                progress += streamBytes.Length;
                stream.Write(streamBytes, 0, streamBytes.Length);
                transferProgress(progress);
            }
        }
    }
}
/// <summary>
/// Splits <paramref name="file"/> into pieces of at most <c>Maxpiecesize</c> bytes, uploads each
/// piece keyed by its hash, registers the resulting Higgs file, and returns its metadata.
/// </summary>
/// <param name="file">Source stream, read sequentially to the end.</param>
/// <param name="transferProgress">Invoked after each piece upload with the cumulative bytes read so far.</param>
/// <returns>The stored file's piece hashes, Higgs file ID, and total length.</returns>
public StoreFileResponse StoreFile(Stream file, Action<long> transferProgress)
{
    var hashes = new List<string>();
    var higgsClient = new HiggsClient(EndpointUrl, _pieceCheckerUrl, _storageZoneId);
    long read = 0;
    while (true)
    {
        var buffer = new byte[Maxpiecesize];
        // Stream.Read may return fewer bytes than requested even before end-of-stream
        // (e.g. network streams), so keep reading until the piece buffer is full or the
        // stream truly ends. A single short Read must NOT be treated as EOF, or a mid-file
        // piece would be truncated and hashed incorrectly.
        int nRead = 0;
        while (nRead < Maxpiecesize)
        {
            int chunk = file.Read(buffer, nRead, Maxpiecesize - nRead);
            if (chunk == 0)
            {
                break; // End of stream reached.
            }
            nRead += chunk;
        }
        if (nRead == 0)
        {
            break; // No more data; all pieces have been stored.
        }
        if (nRead != Maxpiecesize)
        {
            // Final (short) piece: shrink the buffer so the hash covers only real data.
            var newBuffer = new byte[nRead];
            Buffer.BlockCopy(buffer, 0, newBuffer, 0, nRead);
            buffer = newBuffer;
        }
        var pieceHash = CalculatePieceHash(buffer);
        // Send to the store.
        higgsClient.PostStream(pieceHash, new MemoryStream(buffer));
        read += buffer.Length;
        transferProgress(read);
        hashes.Add(pieceHash);
    }

    // Materialize the hash list once instead of calling ToArray() three times.
    var pieceHashes = hashes.ToArray();
    var higgsID = CalculateHiggsFileIDFromPieceHashes(pieceHashes);
    var response = new StoreFileResponse
    {
        PieceHashes = pieceHashes,
        HiggsFileID = higgsID,
        WasAlreadyStored = false,
        Length = read
    };
    CreateHiggsFile(higgsID, pieceHashes, read, _storageZoneId);
    return response;
}
/// <summary>
/// Splits <paramref name="file"/> into pieces of at most <c>Maxpiecesize</c> bytes, uploads each
/// piece keyed by its hash, registers the resulting Higgs file, and returns its metadata.
/// </summary>
/// <param name="file">Source stream, read sequentially to the end.</param>
/// <param name="transferProgress">Invoked after each piece upload with the cumulative bytes read so far.</param>
/// <returns>The stored file's piece hashes, Higgs file ID, and total length.</returns>
public StoreFileResponse StoreFile(Stream file, Action<long> transferProgress)
{
    var hashes = new List<string>();
    var higgsClient = new HiggsClient(EndpointUrl, _pieceCheckerUrl, _storageZoneId);
    long read = 0;
    while (true)
    {
        var buffer = new byte[Maxpiecesize];
        // Stream.Read may return fewer bytes than requested even before end-of-stream
        // (e.g. network streams), so keep reading until the piece buffer is full or the
        // stream truly ends. A single short Read must NOT be treated as EOF, or a mid-file
        // piece would be truncated and hashed incorrectly.
        int nRead = 0;
        while (nRead < Maxpiecesize)
        {
            int chunk = file.Read(buffer, nRead, Maxpiecesize - nRead);
            if (chunk == 0)
            {
                break; // End of stream reached.
            }
            nRead += chunk;
        }
        if (nRead == 0)
        {
            break; // No more data; all pieces have been stored.
        }
        if (nRead != Maxpiecesize)
        {
            // Final (short) piece: shrink the buffer so the hash covers only real data.
            var newBuffer = new byte[nRead];
            Buffer.BlockCopy(buffer, 0, newBuffer, 0, nRead);
            buffer = newBuffer;
        }
        var pieceHash = CalculatePieceHash(buffer);
        // Send to the store.
        higgsClient.PostStream(pieceHash, new MemoryStream(buffer));
        read += buffer.Length;
        transferProgress(read);
        hashes.Add(pieceHash);
    }

    // Materialize the hash list once instead of calling ToArray() three times.
    var pieceHashes = hashes.ToArray();
    var higgsID = CalculateHiggsFileIDFromPieceHashes(pieceHashes);
    var response = new StoreFileResponse
    {
        PieceHashes = pieceHashes,
        HiggsFileID = higgsID,
        WasAlreadyStored = false,
        Length = read
    };
    CreateHiggsFile(higgsID, pieceHashes, read, _storageZoneId);
    return response;
}
/// <summary>
/// Streams the file identified by <paramref name="HiggsFileID"/> into <paramref name="stream"/>,
/// piece by piece, serving each piece from the local cache when available and fetching it from
/// the remote store otherwise.
/// </summary>
/// <param name="HiggsFileID">64-character file identifier whose piece hashes are looked up via <c>GetPieceHashes</c>.</param>
/// <param name="transferProgress">Invoked after each piece with the cumulative number of bytes written so far.</param>
/// <param name="stream">Destination stream; pieces are written to it sequentially in order.</param>
public void GetFile(string HiggsFileID, Action<long> transferProgress, Stream stream)
{
    Debug.Assert(HiggsFileID.Length == 64);
    var hashes = GetPieceHashes(HiggsFileID);
    long progress = 0;

    // One client for all remote fetches; previously a new client was allocated per uncached piece.
    var higgsClient = new HiggsClient(EndpointUrl, _pieceCheckerUrl, _storageZoneId);

    //TODO: at some point parallelise this
    for (int i = 0; i < hashes.Length; i++)
    {
        if (_pieceCache.ContainsPiece(hashes[i]))
        {
            var data = _pieceCache.GetPieceFromCache(hashes[i]);
            progress += data.Length;
            data.CopyTo(stream);
            transferProgress(progress);
        }
        else
        {
            // Dispose the remote stream once the piece is copied (it was leaked before).
            using (var data = higgsClient.GetStream(hashes[i]))
            using (var myStream = new MemoryStream())
            {
                // Buffer the piece first so its exact length is known for progress reporting.
                data.CopyTo(myStream);
                byte[] streamBytes = myStream.ToArray();
                progress += streamBytes.Length;
                stream.Write(streamBytes, 0, streamBytes.Length);
                transferProgress(progress);
            }
        }
    }
}