/// <summary>
/// Reads the block file for (rootHash, targetHash) from the config directory
/// into a pooled buffer. Returns null when the root hash is unknown or the
/// block file does not exist. Caller owns (and must dispose) the returned owner.
/// NOTE(review): the returned owner's Memory may be larger than the file
/// (pool rounding) — callers presumably rely on the file length elsewhere; confirm.
/// </summary>
public async ValueTask<IMemoryOwner<byte>?> ReadAsync(OmniHash rootHash, OmniHash targetHash, CancellationToken cancellationToken = default)
{
    using (await _asyncLock.LockAsync())
    {
        if (!_wantFileStatusMap.ContainsKey(rootHash))
        {
            return null;
        }

        var filePath = Path.Combine(Path.Combine(_configPath, this.OmniHashToFilePath(rootHash)), this.OmniHashToFilePath(targetHash));

        if (!File.Exists(filePath))
        {
            return null;
        }

        using (var fileStream = new UnbufferedFileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.None, FileOptions.None, _bytesPool))
        {
            var length = (int)fileStream.Length;
            var memoryOwner = _bytesPool.Memory.Rent(length);

            try
            {
                // BUGFIX: a single ReadAsync call may return fewer bytes than
                // requested; loop until the whole file has been read.
                var remaining = memoryOwner.Memory.Slice(0, length);

                while (!remaining.IsEmpty)
                {
                    int readLength = await fileStream.ReadAsync(remaining, cancellationToken);
                    if (readLength == 0) throw new EndOfStreamException();
                    remaining = remaining.Slice(readLength);
                }
            }
            catch
            {
                // BUGFIX: the rented buffer leaked when the read threw.
                memoryOwner.Dispose();
                throw;
            }

            return memoryOwner;
        }
    }
}
public void same_as_file_stream_on_reads_with_bigger_buffer()
{
    // Verifies that UnbufferedFileStream returns byte-for-byte the same data
    // as a plain FileStream when reading a 512 KiB file in 4 KiB chunks.
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var bytes = BuildBytes(4096 * 128);
    File.WriteAllBytes(filename, bytes);

    using (var expectedStream = new FileStream(filename, FileMode.Open, FileAccess.Read))
    using (var actualStream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.Read,
               FileShare.Read, false, 4096, 4096 * 4, false, 4096))
    {
        var expected = new byte[4096];
        var actual = new byte[4096];

        for (var chunk = 0; chunk < 128; chunk++)
        {
            var expectedCount = expectedStream.Read(expected, 0, 4096);
            var actualCount = actualStream.Read(actual, 0, 4096);
            Assert.AreEqual(expectedCount, actualCount);

            for (var offset = 0; offset < 4096; offset++)
            {
                Assert.AreEqual(expected[offset], actual[offset]);
            }
        }
    }
}
private Stream GetWriteStream(string filename)
{
    // Unbuffered (direct) access is used for TFChunk writes when enabled;
    // otherwise fall back to a regular buffered FileStream.
    if (_unbuffered)
    {
        Log.Trace("Using unbuffered access for TFChunk '{0}'...", _filename);
        return UnbufferedFileStream.Create(
            _filename,
            FileMode.Open,
            FileAccess.ReadWrite,
            FileShare.Read,
            false,
            4096 * 1024,
            4096,
            _writeThrough,
            4096);
    }

    return new FileStream(
        _filename,
        FileMode.Open,
        FileAccess.ReadWrite,
        FileShare.Read,
        WriteBufferSize,
        FileOptions.SequentialScan);
}
/// <summary>
/// Persists a wanted block to disk and marks it as no longer wanted.
/// Silently ignores blocks that are not wanted or already stored.
/// </summary>
public async ValueTask WriteAsync(OmniHash rootHash, OmniHash targetHash, ReadOnlyMemory<byte> memory, CancellationToken cancellationToken = default)
{
    using (await _asyncLock.LockAsync())
    {
        // Only accept blocks that are still wanted for this root hash.
        if (!_wantFileStatusMap.TryGetValue(rootHash, out var status)) return;
        if (!status.WantBlocks.Contains(targetHash)) return;

        var directoryPath = Path.Combine(_configPath, this.OmniHashToFilePath(rootHash));
        var filePath = Path.Combine(directoryPath, this.OmniHashToFilePath(targetHash));

        // Already on disk; nothing to write (and the want flag is kept, as before).
        if (File.Exists(filePath)) return;

        using (var fileStream = new UnbufferedFileStream(filePath, FileMode.Create, FileAccess.ReadWrite, FileShare.None, FileOptions.None, _bytesPool))
        {
            await fileStream.WriteAsync(memory);
        }

        status.WantBlocks.Remove(targetHash);
    }
}
public void when_reading_multiple_times_exact_page_size()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    MakeFile(filename, 4096 * 100 + 50);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        var buffer = new byte[4096];

        // 100 full-page reads; Position advances one page at a time.
        for (var page = 0; page < 100; page++)
        {
            var bytesRead = stream.Read(buffer, 0, 4096);
            Assert.AreEqual(4096 * (page + 1), stream.Position);
            Assert.AreEqual(4096, bytesRead);

            for (var j = 0; j < buffer.Length; j++)
            {
                Assert.AreEqual(j % 256, buffer[j]);
            }
        }

        // The trailing 50-byte partial page.
        var tailRead = stream.Read(buffer, 0, 50);
        Assert.AreEqual(409600 + 50, stream.Position);
        Assert.AreEqual(50, tailRead);

        for (var j = 0; j < 50; j++)
        {
            Assert.AreEqual(j % 256, buffer[j]);
        }
    }
}
public void when_reading_multiple_times_offset_page_size()
{
    // Reads 100 pages starting at offset 50 and checks position/content after
    // each read. (Removed leftover debug scaffolding: a no-op
    // Console.Write("") that fired on the last iteration.)
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    MakeFile(filename, 4096 * 100 + 50);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        stream.Seek(50, SeekOrigin.Begin);
        var read = new byte[4096];

        for (var i = 0; i < 100; i++)
        {
            var total = stream.Read(read, 0, 4096);
            Assert.AreEqual(4096 * (i + 1) + 50, stream.Position);
            Assert.AreEqual(4096, total);

            for (var j = 0; j < read.Length; j++)
            {
                // MakeFile writes bytes as (index % 256); reads start 50 bytes in.
                Assert.AreEqual((j + 50) % 256, read[j]);
            }
        }

        Assert.AreEqual(4096 * 100 + 50, stream.Position);
    }
}
public void when_writing_multiple_times()
{
    // Two sequential 256-byte writes with a flush after each; the file stays
    // padded to a full 4 KiB block while Position tracks logical bytes.
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var payload = GetBytes(256);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        stream.Write(payload, 0, payload.Length);
        Assert.AreEqual(256, stream.Position);
        stream.Flush();
        Assert.AreEqual(256, stream.Position);

        stream.Write(payload, 0, payload.Length);
        Assert.AreEqual(512, stream.Position);
        stream.Flush();
        Assert.AreEqual(512, stream.Position);

        Assert.AreEqual(4096, new FileInfo(filename).Length);

        var written = ReadAllBytesShared(filename);
        for (var index = 0; index < 512; index++)
        {
            Assert.AreEqual(index % 256, written[index]);
        }
    }
}
public void when_writing_then_seeking_exact_to_alignment_and_writing_again()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var payload = GetBytes(8192);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // Write past one block boundary, then seek back to the exact 4 KiB alignment.
        stream.Write(payload, 0, 5012);
        Assert.AreEqual(5012, stream.Position);
        stream.Seek(4096, SeekOrigin.Begin);
        Assert.AreEqual(4096, stream.Position);

        // Overwrite a few bytes starting at the alignment point.
        payload = GetBytes(15);
        stream.Write(payload, 0, payload.Length);
        Assert.AreEqual(4111, stream.Position);
        stream.Flush();
        Assert.AreEqual(4111, stream.Position);

        Assert.AreEqual(8192, new FileInfo(filename).Length);

        // The first block's original content must be untouched.
        var written = ReadAllBytesShared(filename);
        for (var index = 0; index < 255; index++)
        {
            Assert.AreEqual(index % 256, written[index]);
        }
    }
}
private ReaderWorkItem CreateInternalReaderWorkItem()
{
    // Readers use unbuffered file access when configured; either way the
    // resulting stream is wrapped in a BinaryReader for the work item.
    Stream stream = _unbuffered
        ? UnbufferedFileStream.Create(
            _filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite,
            false, 1024 * 1024, 4096, false, 4096)
        : new FileStream(
            _filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite,
            ReadBufferSize, FileOptions.RandomAccess);

    return new ReaderWorkItem(stream, new BinaryReader(stream), false);
}
public void seek_current_unimplemented()
{
    // SeekOrigin.Current is not supported by UnbufferedFileStream.
    var filename = GetFilePathFor(Guid.NewGuid().ToString());

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        Assert.Throws<NotImplementedException>(delegate { stream.Seek(0, SeekOrigin.Current); });
    }
}
// Persists a block under basePath using its hash-derived relative file name.
private async ValueTask WriteAsync(string basePath, OmniHash hash, ReadOnlyMemory<byte> memory)
{
    var filePath = Path.Combine(basePath, this.OmniHashToFilePath(hash));

    using var fileStream = new UnbufferedFileStream(filePath, FileMode.Create, FileAccess.ReadWrite, FileShare.None, FileOptions.None, _bytesPool);
    await fileStream.WriteAsync(memory);
}
public void seek_origin_end_to_mid_of_file()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // Seeking relative to the end must land exactly 30 bytes before Length.
        stream.Seek(-30, SeekOrigin.End);
        Assert.AreEqual(stream.Length - 30, stream.Position);
    }
}
/// <summary>
/// Loads a gzip-compressed RocketPack message, trying the primary ".rpk.gz"
/// file first and then the ".rpk.gz.bak" backup; falls back to
/// <paramref name="defaultValueFactory"/> when neither can be read.
/// Errors per candidate file are logged and the next candidate is tried.
/// </summary>
public static T Load<T>(string directoryPath, string name, Func<T> defaultValueFactory) where T : RocketPackMessageBase<T>
{
    if (!Directory.Exists(directoryPath))
    {
        Directory.CreateDirectory(directoryPath);
    }

    foreach (string extension in new string[] { ".rpk.gz", ".rpk.gz.bak" })
    {
        string configPath = Path.Combine(directoryPath, name + extension);

        var hub = new Hub();

        try
        {
            if (!File.Exists(configPath))
            {
                continue;
            }

            using (var fileStream = new UnbufferedFileStream(configPath, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, BufferPool.Shared))
            using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress))
            {
                for (; ; )
                {
                    var readLength = gzipStream.Read(hub.Writer.GetSpan(1024 * 4));

                    // BUGFIX: Stream.Read returns 0 at end of stream and is never
                    // negative, so the old "readLength < 0" check spun forever at EOF.
                    if (readLength <= 0)
                    {
                        break;
                    }

                    hub.Writer.Advance(readLength);
                }
            }

            hub.Writer.Complete();

            var result = RocketPackMessageBase<T>.Import(hub.Reader.GetSequence(), BufferPool.Shared);
            hub.Reader.Complete();

            return result;
        }
        catch (Exception e)
        {
            _logger.Error(e);
        }
        finally
        {
            hub.Reset();
        }
    }

    return defaultValueFactory();
}
public void when_resizing_a_file()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
        FileShare.ReadWrite, false, 4096, 4096, false, 4096);

    // SetLength should grow the file to the requested (already aligned) size.
    stream.SetLength(4096 * 1024);
    stream.Close();

    Assert.AreEqual(4096 * 1024, new FileInfo(filename).Length);
}
public void when_writing_less_than_buffer()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var payload = GetBytes(255);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // A write smaller than the internal buffer stays buffered:
        // Position advances but nothing has reached the file yet.
        stream.Write(payload, 0, payload.Length);
        Assert.AreEqual(payload.Length, stream.Position);
        Assert.AreEqual(0, new FileInfo(filename).Length);
    }
}
public void when_reading_on_aligned_buffer()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    MakeFile(filename, 20000);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        var buffer = new byte[4096];
        stream.Read(buffer, 0, 4096);

        // MakeFile writes bytes as (index % 256).
        for (var index = 0; index < 4096; index++)
        {
            Assert.AreEqual(index % 256, buffer[index]);
        }
    }
}
public void when_seeking_greater_than_2gb()
{
    const long GIGABYTE = 1024L * 1024L * 1024L;
    var filename = GetFilePathFor(Guid.NewGuid().ToString());

    try
    {
        using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
                   FileShare.ReadWrite, false, 4096, 4096, false, 4096))
        {
            // Positions beyond int.MaxValue must be representable.
            stream.SetLength(4L * GIGABYTE);
            stream.Seek(3L * GIGABYTE, SeekOrigin.Begin);
        }
    }
    finally
    {
        // Delete eagerly: a 4 GiB sparse file should not outlive the test.
        File.Delete(filename);
    }
}
public void when_reading_on_unaligned_buffer()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    MakeFile(filename, 20000);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // Start 15 bytes into the file so the read is not block-aligned.
        stream.Seek(15, SeekOrigin.Begin);

        var buffer = new byte[999];
        stream.Read(buffer, 0, buffer.Length);

        for (var index = 0; index < buffer.Length; index++)
        {
            Assert.AreEqual((index + 15) % 256, buffer[index]);
        }
    }
}
public void when_writing_more_than_buffer()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var payload = GetBytes(9000);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // 9000 bytes overflow the 4 KiB buffer, so two full blocks reach the
        // disk immediately; the remainder stays buffered.
        stream.Write(payload, 0, payload.Length);
        Assert.AreEqual(4096 * 2, new FileInfo(filename).Length);

        var written = ReadAllBytesShared(filename);
        for (var index = 0; index < 4096 * 2; index++)
        {
            Assert.AreEqual(index % 256, written[index]);
        }
    }
}
public void when_reading_multiple_times_no_seek()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    MakeFile(filename, 20000);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // Two back-to-back 500-byte reads into adjacent halves of one buffer.
        var buffer = new byte[1000];

        stream.Read(buffer, 0, 500);
        Assert.AreEqual(500, stream.Position);

        stream.Read(buffer, 500, 500);
        Assert.AreEqual(1000, stream.Position);

        for (var index = 0; index < buffer.Length; index++)
        {
            Assert.AreEqual(index % 256, buffer[index]);
        }
    }
}
public void seek_write_seek_read_in_buffer()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        // Write at an unaligned offset, then read the same span back while
        // the data is still sitting in the internal buffer.
        var payload = GetBytes(255);
        stream.Seek(4096 + 15, SeekOrigin.Begin);
        stream.Write(payload, 0, payload.Length);
        stream.Seek(4096 + 15, SeekOrigin.Begin);

        var roundTrip = new byte[255];
        stream.Read(roundTrip, 0, roundTrip.Length);

        for (var index = 0; index < roundTrip.Length; index++)
        {
            Assert.AreEqual(index % 255, roundTrip[index]);
        }
    }
}
public void when_writing_less_than_buffer_and_seeking()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var payload = GetBytes(255);

    using (var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        stream.Write(payload, 0, payload.Length);

        // Seeking back to the start forces the buffered block out to disk.
        stream.Seek(0, SeekOrigin.Begin);
        Assert.AreEqual(0, stream.Position);
        Assert.AreEqual(4096, new FileInfo(filename).Length);

        var written = ReadAllBytesShared(filename);
        for (var index = 0; index < 255; index++)
        {
            Assert.AreEqual(index % 256, written[index]);
        }
    }
}
public void when_expanding_an_aligned_file_by_one_byte()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    var stream = UnbufferedFileStream.Create(filename, FileMode.CreateNew, FileAccess.ReadWrite,
        FileShare.ReadWrite, false, 4096, 4096, false, 4096);

    var initialFileSize = 4096 * 1024;  // initial size of 4MB

    stream.SetLength(initialFileSize);
    stream.Seek(0, SeekOrigin.End);
    Assert.AreEqual(initialFileSize, stream.Position);  // verify position

    // Growing by a single byte rounds the physical size up to the next 4 KiB
    // block and must not move the current position.
    stream.SetLength(initialFileSize + 1);
    Assert.AreEqual(initialFileSize, stream.Position);

    stream.Close();
    Assert.AreEqual(initialFileSize + 4096, new FileInfo(filename).Length);
}
public void when_seeking_non_exact_to_zero_block_and_writing()
{
    var filename = GetFilePathFor(Guid.NewGuid().ToString());
    MakeFile(filename, 4096 * 64);
    var payload = GetBytes(512);

    // Write at offset 128 (inside the first block) and flush.
    using (var stream = UnbufferedFileStream.Create(filename, FileMode.Open, FileAccess.ReadWrite,
               FileShare.ReadWrite, false, 4096, 4096, false, 4096))
    {
        stream.Seek(128, SeekOrigin.Begin);
        stream.Write(payload, 0, payload.Length);
        stream.Flush();
    }

    // The 128 bytes before the write target must be untouched.
    using (var stream = new FileStream(filename, FileMode.Open))
    {
        var prefix = new byte[128];
        stream.Read(prefix, 0, 128);

        for (var index = 0; index < prefix.Length; index++)
        {
            Assert.AreEqual(index, prefix[index]);
        }
    }
}
/// <summary>
/// Returns the data for the given block hash, first from the in-memory block
/// cache, then by reading the block's slice out of a shared file on disk.
/// The returned segment's array is rented from _bufferManager; ownership
/// conventions for returning it are defined by the callers — not visible here.
/// Throws BlockNotFoundException when the block cannot be found or read.
/// </summary>
public ArraySegment<byte> GetBlock(Hash hash)
{
    // Cache
    {
        var result = _blocksManager.Get(hash);
        if (result != null)
        {
            return (result.Value);
        }
    }

    // Share
    {
        ArraySegment<byte>? result = null;
        string path = null;

        lock (_lockObject)
        {
            var shareInfo = _contentInfoManager.GetShareInfo(hash);
            if (shareInfo != null)
            {
                var buffer = _bufferManager.TakeBuffer(shareInfo.BlockLength);
                try
                {
                    int length;

                    try
                    {
                        // Read the block's slice directly out of the shared file.
                        using (var stream = new UnbufferedFileStream(shareInfo.Path, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferManager))
                        {
                            stream.Seek((long)shareInfo.GetIndex(hash) * shareInfo.BlockLength, SeekOrigin.Begin);

                            // Last block of the file may be shorter than BlockLength.
                            length = (int)Math.Min(stream.Length - stream.Position, shareInfo.BlockLength);
                            stream.Read(buffer, 0, length);
                        }
                    }
                    catch (ArgumentOutOfRangeException)
                    {
                        // Index outside the file — treat as missing block.
                        throw new BlockNotFoundException();
                    }
                    catch (IOException)
                    {
                        throw new BlockNotFoundException();
                    }

                    result = new ArraySegment<byte>(buffer, 0, length);
                    path = shareInfo.Path;
                }
                catch (Exception)
                {
                    // Return the rented buffer before propagating any failure.
                    _bufferManager.ReturnBuffer(buffer);
                    throw;
                }
            }
        }

        if (result != null)
        {
            // Verify content integrity before handing the block out; a mismatch
            // means the backing file changed, so the content entry is dropped.
            if (hash.Algorithm == HashAlgorithm.Sha256 && Unsafe.Equals(Sha256.Compute(result.Value), hash.Value))
            {
                return (result.Value);
            }
            else
            {
                _bufferManager.ReturnBuffer(result.Value.Array);
                result = null;

                this.RemoveContent(path);
            }
        }
    }

    throw new BlockNotFoundException();
}
/// <summary>
/// Imports a file as content-addressed blocks. The file is split into 1 MiB
/// blocks hashed with SHA2-256; every group of up to 128 blocks gets
/// Reed-Solomon parity blocks. The resulting Merkle tree sections are
/// serialized and re-imported as index blocks until a single root block
/// remains; the returned <c>XeusClue</c> carries the root hash and tree depth.
/// Depth-0 data blocks are referenced from the original file via
/// SharedBlocksMetadata rather than copied into _blockStorage.
/// </summary>
/// <exception cref="ArgumentNullException">path is null.</exception>
/// <exception cref="ImportFailed">An index block could not be stored, or no clue was produced.</exception>
public async ValueTask<XeusClue> Import(string path, CancellationToken token = default)
{
    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    return (await Task.Run(async () =>
    {
        // Already imported? Reuse the existing clue.
        lock (_lockObject)
        {
            var info = _contentMetadataStorage.GetFileContentMetadata(path);
            if (info != null)
            {
                return info.Clue;
            }
        }

        XeusClue? clue = null;
        var lockedHashes = new HashSet<OmniHash>();
        SharedBlocksMetadata? sharedBlocksInfo = null;

        {
            const int blockLength = 1024 * 1024;
            const OmniHashAlgorithmType hashAlgorithmType = OmniHashAlgorithmType.Sha2_256;
            const CorrectionAlgorithmType correctionAlgorithmType = CorrectionAlgorithmType.ReedSolomon8;

            byte depth = 0;

            var merkleTreeSectionList = new List<MerkleTreeSection>();

            // Depth 0: hash the file's blocks in place.
            using (var stream = new UnbufferedFileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferPool))
            {
                if (stream.Length <= blockLength)
                {
                    // Single-block file: its hash is the root clue directly.
                    OmniHash hash;

                    using (var bufferMemoryOwner = _bufferPool.Rent((int)stream.Length))
                    {
                        stream.Read(bufferMemoryOwner.Memory.Span);

                        if (hashAlgorithmType == OmniHashAlgorithmType.Sha2_256)
                        {
                            hash = new OmniHash(OmniHashAlgorithmType.Sha2_256, Sha2_256.ComputeHash(bufferMemoryOwner.Memory.Span));
                        }
                    }

                    sharedBlocksInfo = new SharedBlocksMetadata(path, (ulong)stream.Length, (uint)stream.Length, new OmniHash[] { hash });
                    clue = new XeusClue(hash, depth);
                }
                else
                {
                    var sharedHashes = new List<OmniHash>();

                    for (; ; )
                    {
                        var targetHashes = new List<OmniHash>();
                        var targetMemoryOwners = new List<IMemoryOwner<byte>>();
                        ulong sumLength = 0;

                        try
                        {
                            // Read up to 128 blocks for this parity group.
                            for (int i = 0; stream.Position < stream.Length; i++)
                            {
                                token.ThrowIfCancellationRequested();

                                uint length = (uint)Math.Min(stream.Length - stream.Position, blockLength);
                                var bufferMemoryOwner = _bufferPool.Rent((int)length);

                                try
                                {
                                    stream.Read(bufferMemoryOwner.Memory.Span);
                                    sumLength += length;
                                }
                                catch (Exception)
                                {
                                    bufferMemoryOwner.Dispose();
                                    // BUGFIX: was "throw e;", which resets the stack trace.
                                    throw;
                                }

                                OmniHash hash;

                                if (hashAlgorithmType == OmniHashAlgorithmType.Sha2_256)
                                {
                                    hash = new OmniHash(OmniHashAlgorithmType.Sha2_256, Sha2_256.ComputeHash(bufferMemoryOwner.Memory.Span));
                                }

                                sharedHashes.Add(hash);
                                targetHashes.Add(hash);
                                targetMemoryOwners.Add(bufferMemoryOwner);

                                if (targetMemoryOwners.Count >= 128)
                                {
                                    break;
                                }
                            }

                            var parityHashes = await this.ParityEncode(targetMemoryOwners.Select(n => n.Memory), hashAlgorithmType, correctionAlgorithmType, token);
                            lockedHashes.UnionWith(parityHashes);

                            merkleTreeSectionList.Add(new MerkleTreeSection(correctionAlgorithmType, sumLength, CollectionHelper.Unite(targetHashes, parityHashes).ToArray()));
                        }
                        finally
                        {
                            foreach (var memoryOwner in targetMemoryOwners)
                            {
                                memoryOwner.Dispose();
                            }
                        }

                        if (stream.Position == stream.Length)
                        {
                            break;
                        }
                    }

                    sharedBlocksInfo = new SharedBlocksMetadata(path, (ulong)stream.Length, blockLength, sharedHashes.ToArray());
                    depth++;
                }
            }

            // Depth 1..n: serialize the Merkle sections and import the index
            // itself as stored blocks, until it fits in a single block.
            while (merkleTreeSectionList.Count > 0)
            {
                using (var stream = new RecyclableMemoryStream(_bufferPool))
                {
                    RocketPackHelper.MessageToStream(new MerkleTreeNode(merkleTreeSectionList.ToArray()), stream);
                    stream.Seek(0, SeekOrigin.Begin);

                    merkleTreeSectionList.Clear();

                    if (stream.Length <= blockLength)
                    {
                        OmniHash hash;

                        using (var bufferMemoryOwner = _bufferPool.Rent((int)stream.Length))
                        {
                            stream.Read(bufferMemoryOwner.Memory.Span);

                            if (hashAlgorithmType == OmniHashAlgorithmType.Sha2_256)
                            {
                                hash = new OmniHash(OmniHashAlgorithmType.Sha2_256, Sha2_256.ComputeHash(bufferMemoryOwner.Memory.Span));
                            }

                            _blockStorage.Lock(hash);

                            bool result = _blockStorage.TrySet(hash, bufferMemoryOwner.Memory.Span);
                            if (!result)
                            {
                                throw new ImportFailed("Failed to save Block.");
                            }

                            lockedHashes.Add(hash);
                        }

                        clue = new XeusClue(hash, depth);
                    }
                    else
                    {
                        for (; ; )
                        {
                            var targetHashes = new List<OmniHash>();
                            var targetMemoryOwners = new List<IMemoryOwner<byte>>();
                            ulong sumLength = 0;

                            try
                            {
                                for (int i = 0; stream.Position < stream.Length; i++)
                                {
                                    token.ThrowIfCancellationRequested();

                                    uint length = (uint)Math.Min(stream.Length - stream.Position, blockLength);
                                    var bufferMemoryOwner = _bufferPool.Rent((int)length);

                                    try
                                    {
                                        stream.Read(bufferMemoryOwner.Memory.Span);
                                        sumLength += length;
                                    }
                                    catch (Exception)
                                    {
                                        bufferMemoryOwner.Dispose();
                                        // BUGFIX: was "throw e;", which resets the stack trace.
                                        throw;
                                    }

                                    OmniHash hash;

                                    if (hashAlgorithmType == OmniHashAlgorithmType.Sha2_256)
                                    {
                                        hash = new OmniHash(OmniHashAlgorithmType.Sha2_256, Sha2_256.ComputeHash(bufferMemoryOwner.Memory.Span));
                                    }

                                    _blockStorage.Lock(hash);

                                    bool result = _blockStorage.TrySet(hash, bufferMemoryOwner.Memory.Span);
                                    if (!result)
                                    {
                                        throw new ImportFailed("Failed to save Block.");
                                    }

                                    lockedHashes.Add(hash);
                                    targetHashes.Add(hash);
                                    targetMemoryOwners.Add(bufferMemoryOwner);

                                    if (targetMemoryOwners.Count >= 128)
                                    {
                                        break;
                                    }
                                }

                                var parityHashes = await this.ParityEncode(targetMemoryOwners.Select(n => n.Memory), hashAlgorithmType, correctionAlgorithmType, token);
                                lockedHashes.UnionWith(parityHashes);

                                merkleTreeSectionList.Add(new MerkleTreeSection(correctionAlgorithmType, sumLength, CollectionHelper.Unite(targetHashes, parityHashes).ToArray()));
                            }
                            finally
                            {
                                foreach (var memoryOwner in targetMemoryOwners)
                                {
                                    memoryOwner.Dispose();
                                }
                            }

                            if (stream.Position == stream.Length)
                            {
                                break;
                            }
                        }

                        depth++;
                    }
                }
            }
        }

        if (clue == null)
        {
            throw new ImportFailed("clue is null");
        }

        // Register the metadata and pin every referenced block.
        lock (_lockObject)
        {
            if (!_contentMetadataStorage.ContainsFileContentMetadata(path))
            {
                _contentMetadataStorage.Add(new ContentMetadata(clue, lockedHashes.ToArray(), sharedBlocksInfo));

                foreach (var hash in lockedHashes)
                {
                    _blockStorage.Lock(hash);
                }
            }
        }

        return clue;
    }, token));
}
/// <summary>
/// Tries to fetch the data for a block hash, first from _blockStorage and then
/// by reading the block's slice out of a shared file on disk. On success the
/// caller owns the returned memoryOwner; on any failure path the finally block
/// disposes the rented buffer and, if a shared file was involved, removes that
/// content (the on-disk data failed integrity verification or could not be read).
/// </summary>
public bool TryGetBlock(OmniHash hash, [NotNullWhen(true)] out IMemoryOwner<byte>? memoryOwner)
{
    if (!EnumHelper.IsValid(hash.AlgorithmType))
    {
        throw new ArgumentException($"Incorrect HashAlgorithmType: {hash.AlgorithmType}");
    }

    // Cache
    {
        var result = _blockStorage.TryGet(hash, out memoryOwner);
        if (result)
        {
            return (true);
        }
    }

    // success gates the finally-block cleanup: it is only set to true on the
    // verified-return path, so every other exit disposes the buffer.
    bool success = false;
    string? path = null;

    // Share
    try
    {
        lock (_lockObject)
        {
            var sharedBlocksInfo = _contentMetadataStorage.GetSharedBlocksInfo(hash);
            if (sharedBlocksInfo != null)
            {
                ulong position = (ulong)sharedBlocksInfo.GetIndex(hash) * sharedBlocksInfo.BlockLength;

                // Last block of the file may be shorter than BlockLength.
                uint length = (uint)Math.Min(sharedBlocksInfo.Length - position, sharedBlocksInfo.BlockLength);

                memoryOwner = _bufferPool.Rent((int)length);

                try
                {
                    using (var stream = new UnbufferedFileStream(sharedBlocksInfo.Path, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferPool))
                    {
                        stream.Seek((long)position, SeekOrigin.Begin);
                        stream.Read(memoryOwner.Memory.Span);
                    }

                    path = sharedBlocksInfo.Path;
                }
                catch (Exception e)
                {
                    // Read failure is non-fatal: log and report "not found";
                    // the finally block disposes the rented buffer.
                    _logger.Debug(e);

                    return (false);
                }
            }
        }

        if (memoryOwner == null)
        {
            return (false);
        }

        // Verify the block content against its hash before handing it out.
        if (hash.AlgorithmType == OmniHashAlgorithmType.Sha2_256
            && BytesOperations.SequenceEqual(Sha2_256.ComputeHash(memoryOwner.Memory.Span), hash.Value.Span))
        {
            success = true;
            return (true);
        }
        else
        {
            _logger.Warn("Broken block.");

            return (false);
        }
    }
    finally
    {
        if (!success)
        {
            if (memoryOwner != null)
            {
                memoryOwner.Dispose();
                memoryOwner = null;
            }

            // A non-null path means the shared file was read but failed; the
            // content entry backed by that file is dropped.
            if (path != null)
            {
                this.RemoveContent(path);
            }
        }
    }
}
/// <summary>
/// Builds an in-memory midpoint cache (up to 2^depth entries) over the PTable
/// index entries while simultaneously MD5-hashing the whole file and validating
/// the hash stored in the final MD5Size bytes. Returns null when the table is
/// empty or depth is 0. On any failure the instance is disposed and the
/// exception is rethrown.
/// NOTE(review): the brace structure deliberately spans the #if __MonoCS__
/// branches — on Mono the stream comes from a pooled work item; otherwise an
/// unbuffered stream is opened and disposed by the using.
/// </summary>
internal Midpoint[] CacheMidpointsAndVerifyHash(int depth)
{
    var buffer = new byte[4096];
    if (depth < 0 || depth > 30)
    {
        throw new ArgumentOutOfRangeException("depth");
    }
    var count = Count;
    if (count == 0 || depth == 0)
    {
        return (null);
    }
#if __MonoCS__
    var workItem = GetWorkItem();
    var stream = workItem.Stream;
    try
    {
#else
    using (var stream = UnbufferedFileStream.Create(_filename, FileMode.Open, FileAccess.Read, FileShare.Read, false, 4096, 4096, false, 4096))
    {
#endif
        try
        {
            int midpointsCount;
            Midpoint[] midpoints;
            using (MD5 md5 = MD5.Create())
            {
                try
                {
                    // At least 2 midpoints, at most one per index entry.
                    midpointsCount = (int)Math.Max(2L, Math.Min((long)1 << depth, count));
                    midpoints = new Midpoint[midpointsCount];
                }
                catch (OutOfMemoryException exc)
                {
                    throw new PossibleToHandleOutOfMemoryException("Failed to allocate memory for Midpoint cache.", exc);
                }

                // Hash the header first, then every byte up to each sampled key.
                stream.Seek(0, SeekOrigin.Begin);
                stream.Read(buffer, 0, PTableHeader.Size);
                md5.TransformBlock(buffer, 0, PTableHeader.Size, null, 0);

                long previousNextIndex = long.MinValue;
                var previousKey = new IndexEntryKey(long.MaxValue, int.MaxValue);
                for (long k = 0; k < midpointsCount; ++k)
                {
                    // Evenly spaced sample index over [0, count-1].
                    var nextIndex = (long)k * (count - 1) / (midpointsCount - 1);
                    if (previousNextIndex != nextIndex)
                    {
                        // Hash everything between the previous position and this
                        // entry, then read the key at the sampled entry.
                        ReadUntilWithMd5(PTableHeader.Size + _indexEntrySize * nextIndex, stream, md5);
                        stream.Read(buffer, 0, _indexKeySize);
                        md5.TransformBlock(buffer, 0, _indexKeySize, null, 0);
                        IndexEntryKey key;
                        if (_version == PTableVersions.Index32Bit)
                        {
                            key = new IndexEntryKey(BitConverter.ToUInt32(buffer, 4), BitConverter.ToInt32(buffer, 0));
                        }
                        else
                        {
                            key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 4), BitConverter.ToInt32(buffer, 0));
                        }
                        midpoints[k] = new Midpoint(key, nextIndex);
                        previousNextIndex = nextIndex;
                        previousKey = key;
                    }
                    else
                    {
                        // Duplicate sample index (small tables): reuse the last key.
                        midpoints[k] = new Midpoint(previousKey, previousNextIndex);
                    }
                }

                ReadUntilWithMd5(stream.Length - MD5Size, stream, md5);
                //verify hash (should be at stream.length - MD5Size)
                md5.TransformFinalBlock(Empty.ByteArray, 0, 0);
                var fileHash = new byte[MD5Size];
                stream.Read(fileHash, 0, MD5Size);
                ValidateHash(md5.Hash, fileHash);
                return (midpoints);
            }
        }
        catch
        {
            Dispose();
            throw;
        }
    }
#if __MonoCS__
    finally
    {
        ReturnWorkItem(workItem);
    }
#endif
}
/// <summary>
/// Imports a file as content-addressed blocks on a background task. The file
/// is split into 1 MiB blocks hashed with SHA-256; every group of up to 128
/// blocks gets Reed-Solomon parity. Groups are serialized into Index blocks
/// and re-imported until a single root block remains; the returned Metadata
/// carries the root hash and depth. Depth-0 data blocks are referenced from
/// the original file (ShareInfo), not copied into _blocksManager.
/// </summary>
public Task<Metadata> Import(string path, DateTime creationTime, CancellationToken token)
{
    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }

    return (Task.Run(() =>
    {
        // Check: already imported? Reuse the existing metadata.
        lock (_lockObject)
        {
            var info = _contentInfoManager.GetFileContentInfo(path);
            if (info != null)
            {
                return info.Metadata;
            }
        }

        Metadata metadata = null;
        var lockedHashes = new HashSet<Hash>();
        ShareInfo shareInfo = null;

        {
            const int blockLength = 1024 * 1024;
            const HashAlgorithm hashAlgorithm = HashAlgorithm.Sha256;
            const CorrectionAlgorithm correctionAlgorithm = CorrectionAlgorithm.ReedSolomon8;

            int depth = 0;

            var groupList = new List<Group>();

            // File: hash the file's blocks in place (depth 0).
            using (var stream = new UnbufferedFileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read, FileOptions.None, _bufferManager))
            {
                if (stream.Length <= blockLength)
                {
                    // Single-block file: its hash is the root metadata directly.
                    Hash hash;

                    using (var safeBuffer = _bufferManager.CreateSafeBuffer(blockLength))
                    {
                        int length = (int)stream.Length;
                        stream.Read(safeBuffer.Value, 0, length);

                        if (hashAlgorithm == HashAlgorithm.Sha256)
                        {
                            hash = new Hash(HashAlgorithm.Sha256, Sha256.Compute(safeBuffer.Value, 0, length));
                        }
                    }

                    shareInfo = new ShareInfo(path, stream.Length, (int)stream.Length, new Hash[] { hash });
                    metadata = new Metadata(depth, hash);
                }
                else
                {
                    var sharedHashes = new List<Hash>();

                    for (; ; )
                    {
                        var targetHashes = new List<Hash>();
                        var targetBuffers = new List<ArraySegment<byte>>();
                        long sumLength = 0;

                        try
                        {
                            // Read up to 128 blocks for this parity group.
                            for (int i = 0; stream.Position < stream.Length; i++)
                            {
                                token.ThrowIfCancellationRequested();

                                var buffer = new ArraySegment<byte>();
                                try
                                {
                                    int length = (int)Math.Min(stream.Length - stream.Position, blockLength);
                                    buffer = new ArraySegment<byte>(_bufferManager.TakeBuffer(length), 0, length);
                                    stream.Read(buffer.Array, 0, length);
                                    sumLength += length;
                                }
                                catch (Exception)
                                {
                                    // Return the rented buffer before propagating.
                                    if (buffer.Array != null)
                                    {
                                        _bufferManager.ReturnBuffer(buffer.Array);
                                    }
                                    throw;
                                }

                                Hash hash;

                                if (hashAlgorithm == HashAlgorithm.Sha256)
                                {
                                    hash = new Hash(HashAlgorithm.Sha256, Sha256.Compute(buffer));
                                }

                                sharedHashes.Add(hash);
                                targetHashes.Add(hash);
                                targetBuffers.Add(buffer);

                                if (targetBuffers.Count >= 128)
                                {
                                    break;
                                }
                            }

                            var parityHashes = this.ParityEncoding(targetBuffers, hashAlgorithm, correctionAlgorithm, token);
                            lockedHashes.UnionWith(parityHashes);

                            groupList.Add(new Group(correctionAlgorithm, sumLength, CollectionUtils.Unite(targetHashes, parityHashes).ToArray()));
                        }
                        finally
                        {
                            // Buffers are only needed for hashing/parity; return them all.
                            foreach (var buffer in targetBuffers)
                            {
                                if (buffer.Array == null)
                                {
                                    continue;
                                }
                                _bufferManager.ReturnBuffer(buffer.Array);
                            }
                        }

                        if (stream.Position == stream.Length)
                        {
                            break;
                        }
                    }

                    shareInfo = new ShareInfo(path, stream.Length, blockLength, sharedHashes);
                    depth++;
                }
            }

            // Index: serialize the groups and import the index itself as stored
            // blocks, until it fits in a single block (each pass adds one depth).
            while (groupList.Count > 0)
            {
                using (var stream = (new Index(groupList)).Export(_bufferManager))
                {
                    groupList.Clear();

                    if (stream.Length <= blockLength)
                    {
                        Hash hash;

                        using (var safeBuffer = _bufferManager.CreateSafeBuffer(blockLength))
                        {
                            int length = (int)stream.Length;
                            stream.Read(safeBuffer.Value, 0, length);

                            if (hashAlgorithm == HashAlgorithm.Sha256)
                            {
                                hash = new Hash(HashAlgorithm.Sha256, Sha256.Compute(safeBuffer.Value, 0, length));
                            }

                            _blocksManager.Lock(hash);
                            _blocksManager.Set(hash, new ArraySegment<byte>(safeBuffer.Value, 0, length));
                            lockedHashes.Add(hash);
                        }

                        metadata = new Metadata(depth, hash);
                    }
                    else
                    {
                        for (; ; )
                        {
                            var targetHashes = new List<Hash>();
                            var targetBuffers = new List<ArraySegment<byte>>();
                            long sumLength = 0;

                            try
                            {
                                for (int i = 0; stream.Position < stream.Length; i++)
                                {
                                    token.ThrowIfCancellationRequested();

                                    var buffer = new ArraySegment<byte>();
                                    try
                                    {
                                        int length = (int)Math.Min(stream.Length - stream.Position, blockLength);
                                        buffer = new ArraySegment<byte>(_bufferManager.TakeBuffer(length), 0, length);
                                        stream.Read(buffer.Array, 0, length);
                                        sumLength += length;
                                    }
                                    catch (Exception)
                                    {
                                        if (buffer.Array != null)
                                        {
                                            _bufferManager.ReturnBuffer(buffer.Array);
                                        }
                                        throw;
                                    }

                                    Hash hash;

                                    if (hashAlgorithm == HashAlgorithm.Sha256)
                                    {
                                        hash = new Hash(HashAlgorithm.Sha256, Sha256.Compute(buffer));
                                    }

                                    // Index blocks are stored (and pinned) in the block store.
                                    _blocksManager.Lock(hash);
                                    _blocksManager.Set(hash, buffer);
                                    lockedHashes.Add(hash);

                                    targetHashes.Add(hash);
                                    targetBuffers.Add(buffer);

                                    if (targetBuffers.Count >= 128)
                                    {
                                        break;
                                    }
                                }

                                var parityHashes = this.ParityEncoding(targetBuffers, hashAlgorithm, correctionAlgorithm, token);
                                lockedHashes.UnionWith(parityHashes);

                                groupList.Add(new Group(correctionAlgorithm, sumLength, CollectionUtils.Unite(targetHashes, parityHashes).ToArray()));
                            }
                            finally
                            {
                                foreach (var buffer in targetBuffers)
                                {
                                    if (buffer.Array == null)
                                    {
                                        continue;
                                    }
                                    _bufferManager.ReturnBuffer(buffer.Array);
                                }
                            }

                            if (stream.Position == stream.Length)
                            {
                                break;
                            }
                        }

                        depth++;
                    }
                }
            }
        }

        // Register the content info and pin every referenced block.
        lock (_lockObject)
        {
            if (!_contentInfoManager.ContainsFileContentInfo(path))
            {
                _contentInfoManager.Add(new ContentInfo(creationTime, Timeout.InfiniteTimeSpan, metadata, lockedHashes, shareInfo));

                foreach (var hash in lockedHashes)
                {
                    _blocksManager.Lock(hash);
                }
            }
        }

        return metadata;
    }, token));
}
/// <summary>
/// Builds an in-memory midpoint cache (up to 2^depth entries) over the PTable
/// index entries. When skipIndexVerify is false the whole file is MD5-hashed
/// while sampling and the hash in the final MD5Size bytes is validated; when
/// true, for IndexV4 tables with a matching cached midpoint count, the
/// midpoints persisted in the file footer are loaded directly. Returns null
/// when the table is empty or depth is 0. On failure the instance is disposed
/// and the exception rethrown; the stream/work item is released in finally.
/// </summary>
internal Midpoint[] CacheMidpointsAndVerifyHash(int depth, bool skipIndexVerify)
{
    var buffer = new byte[4096];
    if (depth < 0 || depth > 30)
    {
        throw new ArgumentOutOfRangeException("depth");
    }
    var count = Count;
    if (count == 0 || depth == 0)
    {
        return (null);
    }

    if (skipIndexVerify)
    {
        Log.Debug("Disabling Verification of PTable");
    }

    // On Unix/Mac a pooled reader work item supplies the stream; elsewhere an
    // unbuffered stream is opened and disposed in the finally block below.
    Stream stream = null;
    WorkItem workItem = null;
    if (Runtime.IsUnixOrMac)
    {
        workItem = GetWorkItem();
        stream = workItem.Stream;
    }
    else
    {
        stream = UnbufferedFileStream.Create(_filename, FileMode.Open, FileAccess.Read, FileShare.Read, false, 4096, 4096, false, 4096);
    }
    try
    {
        int midpointsCount;
        Midpoint[] midpoints;
        using (MD5 md5 = MD5.Create())
        {
            try
            {
                // At least 2 midpoints, at most one per index entry.
                midpointsCount = (int)Math.Max(2L, Math.Min((long)1 << depth, count));
                midpoints = new Midpoint[midpointsCount];
            }
            catch (OutOfMemoryException exc)
            {
                throw new PossibleToHandleOutOfMemoryException("Failed to allocate memory for Midpoint cache.", exc);
            }

            if (skipIndexVerify && (_version >= PTableVersions.IndexV4))
            {
                if (_midpointsCached == midpointsCount)
                {
                    //index verification is disabled and cached midpoints with the same depth requested are available
                    //so, we can load them directly from the PTable file
                    Log.Debug("Loading {midpointsCached} cached midpoints from PTable", _midpointsCached);

                    // Cached midpoints sit just before the footer and MD5 trailer.
                    long startOffset = stream.Length - MD5Size - PTableFooter.GetSize(_version) - _midpointsCacheSize;
                    stream.Seek(startOffset, SeekOrigin.Begin);
                    for (uint k = 0; k < _midpointsCached; k++)
                    {
                        stream.Read(buffer, 0, _indexEntrySize);
                        IndexEntryKey key;
                        long index;
                        if (_version == PTableVersions.IndexV4)
                        {
                            key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 8), BitConverter.ToInt64(buffer, 0));
                            index = BitConverter.ToInt64(buffer, 8 + 8);
                        }
                        else
                        {
                            throw new InvalidOperationException("Unknown PTable version: " + _version);
                        }

                        midpoints[k] = new Midpoint(key, index);

                        // Midpoints must be ordered (keys descending, item
                        // indices ascending) or the cache is corrupt.
                        if (k > 0)
                        {
                            if (midpoints[k].Key.GreaterThan(midpoints[k - 1].Key))
                            {
                                throw new CorruptIndexException(String.Format("Index entry key for midpoint {0} (stream: {1}, version: {2}) < index entry key for midpoint {3} (stream: {4}, version: {5})", k - 1, midpoints[k - 1].Key.Stream, midpoints[k - 1].Key.Version, k, midpoints[k].Key.Stream, midpoints[k].Key.Version));
                            }
                            else if (midpoints[k - 1].ItemIndex > midpoints[k].ItemIndex)
                            {
                                throw new CorruptIndexException(String.Format("Item index for midpoint {0} ({1}) > Item index for midpoint {2} ({3})", k - 1, midpoints[k - 1].ItemIndex, k, midpoints[k].ItemIndex));
                            }
                        }
                    }
                    return (midpoints);
                }
                else
                {
                    Log.Debug("Skipping loading of cached midpoints from PTable due to count mismatch, cached midpoints: {midpointsCached} / required midpoints: {midpointsCount}", _midpointsCached, midpointsCount);
                }
            }

            // Fall-through path: sample midpoints from the index entries,
            // hashing the file along the way unless verification is skipped.
            if (!skipIndexVerify)
            {
                stream.Seek(0, SeekOrigin.Begin);
                stream.Read(buffer, 0, PTableHeader.Size);
                md5.TransformBlock(buffer, 0, PTableHeader.Size, null, 0);
            }

            long previousNextIndex = long.MinValue;
            var previousKey = new IndexEntryKey(long.MaxValue, long.MaxValue);
            for (long k = 0; k < midpointsCount; ++k)
            {
                long nextIndex = GetMidpointIndex(k, count, midpointsCount);
                if (previousNextIndex != nextIndex)
                {
                    if (!skipIndexVerify)
                    {
                        // Hash everything up to the sampled entry, then its key.
                        ReadUntilWithMd5(PTableHeader.Size + _indexEntrySize * nextIndex, stream, md5);
                        stream.Read(buffer, 0, _indexKeySize);
                        md5.TransformBlock(buffer, 0, _indexKeySize, null, 0);
                    }
                    else
                    {
                        // No hashing: seek straight to the sampled entry.
                        stream.Seek(PTableHeader.Size + _indexEntrySize * nextIndex, SeekOrigin.Begin);
                        stream.Read(buffer, 0, _indexKeySize);
                    }

                    // Key layout depends on the index file version.
                    IndexEntryKey key;
                    if (_version == PTableVersions.IndexV1)
                    {
                        key = new IndexEntryKey(BitConverter.ToUInt32(buffer, 4), BitConverter.ToInt32(buffer, 0));
                    }
                    else if (_version == PTableVersions.IndexV2)
                    {
                        key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 4), BitConverter.ToInt32(buffer, 0));
                    }
                    else
                    {
                        key = new IndexEntryKey(BitConverter.ToUInt64(buffer, 8), BitConverter.ToInt64(buffer, 0));
                    }
                    midpoints[k] = new Midpoint(key, nextIndex);
                    previousNextIndex = nextIndex;
                    previousKey = key;
                }
                else
                {
                    // Duplicate sample index (small tables): reuse the last key.
                    midpoints[k] = new Midpoint(previousKey, previousNextIndex);
                }

                // Same ordering invariant as for the cached-midpoints path.
                if (k > 0)
                {
                    if (midpoints[k].Key.GreaterThan(midpoints[k - 1].Key))
                    {
                        throw new CorruptIndexException(String.Format("Index entry key for midpoint {0} (stream: {1}, version: {2}) < index entry key for midpoint {3} (stream: {4}, version: {5})", k - 1, midpoints[k - 1].Key.Stream, midpoints[k - 1].Key.Version, k, midpoints[k].Key.Stream, midpoints[k].Key.Version));
                    }
                    else if (midpoints[k - 1].ItemIndex > midpoints[k].ItemIndex)
                    {
                        throw new CorruptIndexException(String.Format("Item index for midpoint {0} ({1}) > Item index for midpoint {2} ({3})", k - 1, midpoints[k - 1].ItemIndex, k, midpoints[k].ItemIndex));
                    }
                }
            }

            if (!skipIndexVerify)
            {
                ReadUntilWithMd5(stream.Length - MD5Size, stream, md5);
                //verify hash (should be at stream.length - MD5Size)
                md5.TransformFinalBlock(Empty.ByteArray, 0, 0);
                var fileHash = new byte[MD5Size];
                stream.Read(fileHash, 0, MD5Size);
                ValidateHash(md5.Hash, fileHash);
            }

            return (midpoints);
        }
    }
    catch
    {
        Dispose();
        throw;
    }
    finally
    {
        if (Runtime.IsUnixOrMac)
        {
            if (workItem != null)
            {
                ReturnWorkItem(workItem);
            }
        }
        else
        {
            if (stream != null)
            {
                stream.Dispose();
            }
        }
    }
}