/// <summary>
/// Verifies that the <see cref="HashTreeNode.Hash" /> property round-trips an assigned value.
/// </summary>
public void HashTest()
{
    // Arrange.
    var subject = new HashTreeNode();

    // Act.
    subject.Hash = TestVectors.h("ABCDEF");

    // Assert.
    CustomAssert.AreEqual(TestVectors.h("ABCDEF"), subject.Hash);
}
/// <summary>
/// Verifies that the <see cref="HashTreeNode.RangeEnd" /> property round-trips an assigned value.
/// </summary>
public void RangeEndTest()
{
    // Arrange.
    var subject = new HashTreeNode();

    // Act.
    subject.RangeEnd = 5;

    // Assert.
    Assert.AreEqual(5, subject.RangeEnd);
}
/// <summary>
/// Verifies that the <see cref="HashTreeNode.RangeStart" /> property round-trips an assigned value.
/// </summary>
public void RangeStartTest()
{
    // Arrange.
    var subject = new HashTreeNode();

    // Act.
    subject.RangeStart = 5;

    // Assert.
    Assert.AreEqual(5, subject.RangeStart);
}
// Assigns the same node as both the right and then the left child. No assertions follow,
// so this presumably pairs with an expected-exception attribute that is outside this
// view (i.e. assigning one node to both child slots should throw) — TODO confirm.
public void RightLeftTest() { HashTreeNode node = new HashTreeNode(); HashTreeNode test = new HashTreeNode(); node.Right = test; node.Left = test; }
// Assigns the same node as both the left child and the parent. No assertions follow,
// so this presumably pairs with an expected-exception attribute that is outside this
// view (i.e. a node should not be both child and parent) — TODO confirm.
public void LeftParentTest() { HashTreeNode node = new HashTreeNode(); HashTreeNode test = new HashTreeNode(); node.Left = test; node.Parent = test; }
/// <summary>
/// Verifies that the <see cref="HashTreeNode.Right" /> property round-trips an assigned node.
/// </summary>
public void RightTest()
{
    // Arrange.
    var subject = new HashTreeNode();
    var child = new HashTreeNode();

    // Act.
    subject.Right = child;

    // Assert.
    Assert.AreEqual(child, subject.Right);
}
/// <summary>
/// Verifies that the <see cref="HashTreeNode.Left" /> property round-trips an assigned node.
/// </summary>
public void LeftTest()
{
    // Arrange.
    var subject = new HashTreeNode();
    var child = new HashTreeNode();

    // Act.
    subject.Left = child;

    // Assert.
    Assert.AreEqual(child, subject.Left);
}
/// <summary>
/// Verifies that the <see cref="HashTreeNode.Parent" /> property round-trips an assigned node.
/// </summary>
public void ParentTest()
{
    // Arrange.
    var subject = new HashTreeNode();
    var parent = new HashTreeNode();

    // Act.
    subject.Parent = parent;

    // Assert.
    Assert.AreEqual(parent, subject.Parent);
}
// Assigns the same node as both the right child and the parent. No assertions follow,
// so this presumably pairs with an expected-exception attribute that is outside this
// view (i.e. a node should not be both child and parent) — TODO confirm.
public void RightParentTest() { HashTreeNode node = new HashTreeNode(); HashTreeNode test = new HashTreeNode(); node.Right = test; node.Parent = test; }
/// <summary>
/// Initializes a new instance of the <see cref="HashTree{T}" /> class.
/// </summary>
/// <param name="hashingProcessor">
/// A processor that produces hash values.
/// </param>
/// <param name="algorithm">
/// The algorithm used to produce hash values. The default value is
/// <see cref="HashingAlgorithmSpecification.ShaTwo256" />.
/// </param>
/// <param name="blocks">
/// An ordered collection of data block objects underlying the tree.
/// </param>
/// <exception cref="ArgumentNullException">
/// <paramref name="hashingProcessor" /> is <see langword="null" /> -or- <paramref name="blocks" /> is
/// <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="algorithm" /> is equal to <see cref="HashingAlgorithmSpecification.Unspecified" />.
/// </exception>
/// <exception cref="SecurityException">
/// An exception was raised during hashing or serialization.
/// </exception>
public HashTree(IHashingProcessor<TBlock> hashingProcessor, HashingAlgorithmSpecification algorithm, IEnumerable<TBlock> blocks)
    : base()
{
    // Validate the algorithm before touching the processor so the exception order is stable.
    Algorithm = algorithm.RejectIf().IsEqualToValue(HashingAlgorithmSpecification.Unspecified, nameof(algorithm));
    HashingProcessor = hashingProcessor.RejectIf().IsNull(nameof(hashingProcessor)).TargetArgument;

    // Start with an empty tree, then fold in the supplied blocks (which also rejects null).
    LeafNodes = new List<HashTreeNode>();
    RootNode = new HashTreeNode();
    AddBlockRange(blocks);
}
/// <summary>
/// Verifies that the hash-and-range constructor populates the node and leaves all links null.
/// </summary>
public void Constructor5Test()
{
    // Act.
    var subject = new HashTreeNode(TestVectors.h("ABCDEF"), 1, 2);

    // Assert: the hash and range are stored, and the node starts detached.
    CustomAssert.AreEqual(TestVectors.h("ABCDEF"), subject.Hash);
    Assert.IsNull(subject.Parent);
    Assert.IsNull(subject.Left);
    Assert.IsNull(subject.Right);
    Assert.AreEqual(1, subject.RangeStart);
    Assert.AreEqual(2, subject.RangeEnd);
}
/// <summary>
/// Verifies that the default constructor produces an empty, detached node.
/// </summary>
public void Constructor1Test()
{
    // Act.
    var subject = new HashTreeNode();

    // Assert: no hash, no links, zeroed range.
    Assert.IsNull(subject.Hash);
    Assert.IsNull(subject.Parent);
    Assert.IsNull(subject.Left);
    Assert.IsNull(subject.Right);
    Assert.AreEqual(0, subject.RangeStart);
    Assert.AreEqual(0, subject.RangeEnd);
}
/// <summary>
/// Collapses one row of the tree into its parent row, reusing unchanged parents and
/// rebuilding changed ones, then recurses until a single root node remains.
/// </summary>
/// <param name="row">
/// The ordered row of nodes to pair up into parents.
/// </param>
private void PermuteRow(IEnumerable<HashTreeNode> row)
{
    // FIX: the original re-enumerated the sequence on every access (Count(), then
    // Skip(i * 2).Take(2) inside the loop, then Last()), which is O(n^2) for a
    // non-indexed sequence. Materialize once and index directly; the recursive call
    // always passes an array, so this cast is usually free.
    var nodes = row as IReadOnlyList<HashTreeNode> ?? row.ToList();
    var rowCount = nodes.Count;

    if (rowCount == 1)
    {
        // A single remaining node is the root of the tree.
        RootNode = nodes[0];
        return;
    }

    var pairCount = rowCount / 2;
    var pairCountModulus = rowCount % 2;
    var parentRowLength = pairCount + pairCountModulus;
    var parentRow = new HashTreeNode[parentRowLength];

    for (var i = 0; i < pairCount; i++)
    {
        var leftChild = nodes[i * 2];
        var rightChild = nodes[(i * 2) + 1];

        if (leftChild.IsRoot)
        {
            // The pair has no parent yet; create one from the children's hashes.
            parentRow[i] = CreateNode(leftChild, rightChild);
        }
        else if (ReferenceEquals(leftChild.Parent, rightChild.Parent))
        {
            // The pairing is unchanged; reuse the existing parent node.
            parentRow[i] = leftChild.Parent as HashTreeNode;
        }
        else
        {
            // The pairing changed; discard the stale parent and rebuild.
            ((HashTreeNode)leftChild.Parent).Destroy();
            parentRow[i] = CreateNode(leftChild, rightChild);
        }
    }

    if (pairCountModulus == 1)
    {
        // Carry the leftover node up to the next row.
        parentRow[parentRowLength - 1] = nodes[rowCount - 1];
    }

    PermuteRow(parentRow);
}
/// <summary>
/// Stores this node and its descendants in a blobstore by recursing depth-first:
/// child directories are stored before this node so their hashes can be embedded in
/// this node's serialized metadata.
/// Breaks with circular references, but these should only occur
/// with hard- (and soft-?) links. TODO: handle links.
/// </summary>
/// <param name="storeGetHash">Function called to store node data, returns hash.</param>
/// <returns>The hash of the stored tree and a tree of its child hashes.</returns>
public (byte[] nodehash, HashTreeNode node) Store(Func<byte[], byte[]> storeGetHash)
{
    // Each child entry pairs a blob hash with its subtree (null for files, which are leaves).
    List<(byte[] nodehash, HashTreeNode? node)> children = new List<(byte[] nodehash, HashTreeNode? node)>();
    List<byte[]> dirhashes = new List<byte[]>();
    foreach (MetadataNode dir in Directories.Values)
    {
        // Store subdirectories first (post-order) so their hashes are available below.
        var (newhash, newnode) = dir.Store(storeGetHash);
        dirhashes.Add(newhash);
        children.Add((newhash, newnode));
    }
    foreach (var fm in Files.Values.AsEnumerable())
    {
        if (fm.FileHash == null)
        {
            throw new NullReferenceException("Stored filehashes cannot be null");
        }
        // Files contribute only a hash; they have no child subtree.
        children.Add((fm.FileHash, null));
    }
    Dictionary<string, byte[]> mtdata = new Dictionary<string, byte[]>();
    // Serialized-format version history (the "-vN" suffixes below are part of the
    // on-disk format — do not change them without a migration):
    // -"-v1"
    // DirMetadata = FileMetadata DirMetadata.serialize()
    // Directories = enum_encode([Directories.Values MetadataNode.serialize(),... ])
    // Files = enum_encode([Files.Values FileMetadata.serialize(),... ])
    // -"-v2"
    // Directories = enum_encode([dirrefs,...])
    // "-v3"
    // DirectoriesMultiblock = enum_encode([BitConverter.GetBytes(multiblock),...])
    // -v4
    // removed DirectoriesMultiblock
    mtdata.Add("DirMetadata-v1", DirMetadata.serialize());
    mtdata.Add("Files-v1", BinaryEncoding.enum_encode(Files.Values.AsEnumerable().Select(fm => fm.serialize())));
    mtdata.Add("Directories-v2", BinaryEncoding.enum_encode(dirhashes));
    // Hash/store this node's own serialized metadata, then wrap the child hashes in a tree node.
    byte[] nodehash = storeGetHash(BinaryEncoding.dict_encode(mtdata));
    HashTreeNode node = new HashTreeNode(children);
    return (nodehash, node);
}
/// <summary>
/// Increments reference counts for a newly added backup: the backup blob, its metadata
/// tree, and every blob the tree references, skipping subtrees that are already counted
/// for this backup set.
/// </summary>
/// <param name="bsname">The backup set the references belong to.</param>
/// <param name="backuphash">Hash of the backup record blob.</param>
/// <param name="mtreehash">Hash of the metadata tree blob.</param>
/// <param name="mtreereferences">Tree of child blob hashes referenced by the metadata tree.</param>
public void FinalizeBackupAddition(string bsname, byte[] backuphash, byte[] mtreehash, HashTreeNode mtreereferences)
{
    BlobLocation backupblocation = GetBlobLocation(backuphash);
    if (!backupblocation.BSetReferenceCounts.ContainsKey(bsname) || backupblocation.BSetReferenceCounts[bsname] == 0)
    {
        BlobLocation mtreeblocation = GetBlobLocation(mtreehash);
        // FIX: originally tested backupblocation's counts here while indexing
        // mtreeblocation's — the guard must be about the metadata tree itself.
        if (!mtreeblocation.BSetReferenceCounts.ContainsKey(bsname) || mtreeblocation.BSetReferenceCounts[bsname] == 0)
        {
            ISkippableChildrenIterator<byte[]> childReferences = mtreereferences.GetChildIterator();
            foreach (var blobhash in childReferences)
            {
                BlobLocation blocation = GetBlobLocation(blobhash);
                // FIX: originally checked backupblocation.BSetReferenceCounts.ContainsKey
                // but indexed blocation.BSetReferenceCounts, which could throw
                // KeyNotFoundException and mis-skip subtrees. Both checks must target blocation.
                if (blocation.BSetReferenceCounts.ContainsKey(bsname) && blocation.BSetReferenceCounts[bsname] > 0) // This was already stored
                {
                    childReferences.SkipChildren();
                }
                else if (blocation.BlockHashes != null)
                {
                    // Multiblock blob: count each constituent block as well.
                    foreach (var mbref in blocation.BlockHashes)
                    {
                        IncrementReferenceCountNoRecurse(bsname, mbref, 1);
                    }
                }
                IncrementReferenceCountNoRecurse(bsname, blocation, blobhash, 1);
            }
            IncrementReferenceCountNoRecurse(bsname, mtreeblocation, mtreehash, 1);
        }
        IncrementReferenceCountNoRecurse(bsname, backupblocation, backuphash, 1);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="HashTreeNode" /> class with the
/// specified hash value and child nodes; all work is delegated to the base constructor.
/// </summary>
/// <param name="value">The hash value for the new node.</param>
/// <param name="leftChild">The left child of the new node.</param>
/// <param name="rightChild">The right child of the new node.</param>
internal HashTreeNode(Byte[] value, HashTreeNode leftChild, HashTreeNode rightChild) : base(value, leftChild, rightChild) { return; }
// Assigns an invalid value to RangeStart with no assertion afterward; presumably the
// data rows supply out-of-range values and an expected-exception attribute outside
// this view verifies the setter throws — TODO confirm.
public void BadRangeStartTest(int value) { HashTreeNode node = new HashTreeNode(); node.RangeStart = value; }
// Assigns an invalid value to RangeEnd with no assertion afterward; presumably the
// data rows supply out-of-range values and an expected-exception attribute outside
// this view verifies the setter throws — TODO confirm.
public void BadRangeEndTest(int value) { HashTreeNode node = new HashTreeNode(); node.RangeEnd = value; }
/// <summary>
/// Creates a parent node whose hash is calculated from the hash values of the two
/// specified child nodes.
/// </summary>
/// <param name="leftChild">The left child of the new node.</param>
/// <param name="rightChild">The right child of the new node.</param>
/// <returns>The newly created parent node.</returns>
private HashTreeNode CreateNode(HashTreeNode leftChild, HashTreeNode rightChild)
{
    var parentHashValue = CalculateHash(leftChild.Value, rightChild.Value);
    return new HashTreeNode(parentHashValue, leftChild, rightChild);
}
/// <summary>
/// Initializes a new iterator rooted at the specified node; the node is captured as-is
/// (no null check is performed here).
/// </summary>
/// <param name="node">The node whose children the iterator will traverse.</param>
public HashTreeNodeSkippableChildrenIterator(HashTreeNode node) { Node = node; }
/// <summary>
/// Increments reference counts for a newly added backup: the backup blob, its metadata
/// tree, and every blob the tree references, skipping subtrees whose references are
/// already counted for this backup set.
/// </summary>
/// <param name="bsname">The backup set the references belong to.</param>
/// <param name="backuphash">Hash of the backup record blob.</param>
/// <param name="mtreehash">Hash of the metadata tree blob.</param>
/// <param name="mtreereferences">Tree of child blob hashes referenced by the metadata tree.</param>
public void FinalizeBackupAddition(BackupSetReference bsname, byte[] backuphash, byte[] mtreehash, HashTreeNode mtreereferences)
{
    BlobLocation backupblocation = GetBlobLocation(backuphash);
    int? backupRefCount = backupblocation.GetBSetReferenceCount(bsname);
    // Only count the backup once per backup set; a positive count means this was already done.
    if (!backupRefCount.HasValue || backupRefCount == 0)
    {
        BlobLocation mtreeblocation = GetBlobLocation(mtreehash);
        int? mtreeRefCount = mtreeblocation.GetBSetReferenceCount(bsname);
        // Likewise, only walk the metadata tree's references if the tree itself is uncounted.
        if (!mtreeRefCount.HasValue || mtreeRefCount == 0)
        {
            ISkippableChildrenIterator<byte[]> childReferences = mtreereferences.GetChildIterator();
            foreach (var blobhash in childReferences)
            {
                BlobLocation blocation = GetBlobLocation(blobhash);
                int? refCount = blocation.GetBSetReferenceCount(bsname);
                if (refCount.HasValue && refCount > 0) // This was already stored
                {
                    // Subtree already counted for this set; don't descend into it.
                    childReferences.SkipChildrenOfCurrent();
                }
                else if (blocation.BlockHashes != null)
                {
                    // Multiblock blob: count each constituent block as well.
                    foreach (var mbref in blocation.BlockHashes)
                    {
                        IncrementReferenceCountNoRecurse(bsname, mbref, 1);
                    }
                }
                // The blob itself is counted regardless of which branch ran above.
                IncrementReferenceCountNoRecurse(bsname, blocation, blobhash, 1);
            }
            IncrementReferenceCountNoRecurse(bsname, mtreeblocation, mtreehash, 1);
        }
        IncrementReferenceCountNoRecurse(bsname, backupblocation, backuphash, 1);
    }
}
/// <summary>
/// Verifies that <see cref="HashTreeNode.Count" /> reflects the supplied range boundaries.
/// </summary>
public void CountTest(long start, long end, long count)
{
    // Arrange / Act.
    var subject = new HashTreeNode(null, start, end);

    // Assert.
    Assert.AreEqual(count, subject.Count);
}