[Trait("Category", "Integration2")] // Using a different category name to allow all integration tests to run in parallel.
        public void Can1024KChunkReallyLargeFiles()
        {
            // We want to make sure this goes past uint.MaxValue == 4GB
            HashType defaultHashType = HashType.Dedup1024K;

            int blockSize  = (DedupNode.MaxDirectChildrenPerNode * 2) * defaultHashType.GetAvgChunkSize(); // 1024 chunks * 1024K avg = 1GB
            int blockCount = (int)((uint.MaxValue / (uint)blockSize) + 1);                                 // uint.MaxValue / 1GB == 3, so 4 blocks

            Assert.True(((long)blockSize * (long)blockCount) > (long)uint.MaxValue);

            var node = CanChunkLargeFilesHelper(
                defaultHashType,
                blockSize,
                blockCount,
                "CE6299176DC223B083363ED4DF81646198DD1E4423C676B82196B7FA17031A42");

            var chunks            = node.EnumerateChunkLeafsInOrder().ToList();
            var nodes             = node.EnumerateInnerNodesDepthFirst().ToList();
            int minExpectedChunks = DedupNode.MaxDirectChildrenPerNode * blockCount;

            Assert.True(chunks.Count > minExpectedChunks, $"Expecting at least '{minExpectedChunks}' chunks but got '{chunks.Count}'.");

            int thisLevel     = chunks.Count;
            int expectedNodes = 0;

            while (thisLevel > 1)
            {
                int parentNodesForThisLevel = (thisLevel + DedupNode.MaxDirectChildrenPerNode - 1) / DedupNode.MaxDirectChildrenPerNode;
                expectedNodes += parentNodesForThisLevel;
                thisLevel      = parentNodesForThisLevel;
            }

            Assert.True(nodes.Count <= expectedNodes);
        }
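
For context, the block math above works out as follows; a minimal arithmetic sketch, assuming the 512 fan-out and 1024K average chunk size these tests appear to use:

            // Arithmetic sketch for the 1024K test above (fan-out of 512 assumed).
            long avgChunk   = 1024 * 1024;                      // 1024K
            long blockSize  = 512 * 2 * avgChunk;               // 1,073,741,824 bytes == 1GB
            long blockCount = (uint.MaxValue / blockSize) + 1;  // 3 + 1 == 4
            // 4 blocks * 1GB == 4,294,967,296 bytes, one byte past uint.MaxValue,
            // so the hashed stream is guaranteed to cross the 4GB boundary.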

        [Fact]
        public void Can64KChunkReallyLargeFiles()
        {
            // We want to make sure this goes past uint.MaxValue == 4GB

            HashType defaultHashType = HashType.Dedup64K;

            int blockSize  = 2 * DedupNode.MaxDirectChildrenPerNode * defaultHashType.GetAvgChunkSize(); // 1024 chunks * 64K avg = 64MB
            int blockCount = (int)((uint.MaxValue / (uint)blockSize) + 1);                               // uint.MaxValue / 64MB == 63, so 64 blocks

            Assert.True(((long)blockSize * (long)blockCount) > (long)uint.MaxValue);

            var node = CanChunkLargeFilesHelper(
                defaultHashType,
                blockSize,
                blockCount,
                "A09C8CB4C1B23022C571E75CA143040F9ED8D9A593A7FEECDE2B98725A19E3F5");

            var chunks            = node.EnumerateChunkLeafsInOrder().ToList();
            var nodes             = node.EnumerateInnerNodesDepthFirst().ToList();
            int minExpectedChunks = DedupNode.MaxDirectChildrenPerNode * blockCount;

            Assert.True(chunks.Count > minExpectedChunks, $"Expecting at least '{minExpectedChunks}' chunks but got '{chunks.Count}'.");

            int thisLevel     = chunks.Count;
            int expectedNodes = 0;

            while (thisLevel > 1)
            {
                int parentNodesForThisLevel = (thisLevel + DedupNode.MaxDirectChildrenPerNode - 1) / DedupNode.MaxDirectChildrenPerNode;
                expectedNodes += parentNodesForThisLevel;
                thisLevel      = parentNodesForThisLevel;
            }

            Assert.True(nodes.Count <= expectedNodes);
        }
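
Both tests bound the inner-node count with the same ceiling-division walk up the tree. A standalone sketch of that computation, with the fan-out passed in explicitly (512 is assumed in the worked example, matching how MaxDirectChildrenPerNode is used above):

        // Each tree level needs ceil(n / fanOut) parent nodes until a single root remains.
        private static int MaxExpectedInnerNodes(int chunkCount, int fanOut)
        {
            int thisLevel = chunkCount;
            int expectedNodes = 0;
            while (thisLevel > 1)
            {
                int parents = (thisLevel + fanOut - 1) / fanOut; // ceiling division
                expectedNodes += parents;
                thisLevel = parents;
            }
            return expectedNodes;
        }

        // Worked example: 2048 chunks at fan-out 512 need ceil(2048/512) = 4 parents
        // plus 1 root, so MaxExpectedInnerNodes(2048, 512) == 5.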
Example #3
        /// <summary>
        /// Initializes a new instance of the <see cref="Dedup1024KHashAlgorithm"/> class.
        /// </summary>
        public Dedup1024KHashAlgorithm(IChunker chunker)
            : base(chunker)
        {
            int expectedAvgChunkSize = TargetHashType.GetAvgChunkSize();

            Contract.Check(chunker.Configuration.AvgChunkSize == expectedAvgChunkSize)?.Assert($"Invalid average chunk size (in bytes) specified: {chunker.Configuration.AvgChunkSize} expected: {expectedAvgChunkSize}");
        }
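
A hedged usage sketch, assuming the ManagedChunker and GetChunkerConfiguration helpers shown elsewhere on this page: the Contract check only passes when the chunker's average chunk size matches the 1024K target.

            // Usage sketch (types as in the other examples here):
            var config = HashType.Dedup1024K.GetChunkerConfiguration(); // AvgChunkSize == 1024K
            var hasher = new Dedup1024KHashAlgorithm(new ManagedChunker(config));
            // A chunker configured for a different average size (e.g. 64K) would
            // trip the Contract assertion in the constructor instead.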
        [Theory]
        [InlineData(HashType.Dedup1024K, DedupNode.MaxDirectChildrenPerNode / 8, 64, 16)] // chunk size: 64 * 1024K / 16 = 4MB; 64 chunks * 4MB = 256MB total
        public void HashOfChunksInNodeMatchesChunkHashAlgorithmNegative(HashType hashType, int expectedChunkCount, int multiplier, int divider)
        {
            Assert.True(hashType.IsValidDedup(), $"Hash type: {hashType} is not a valid dedup.");
            var config = new ChunkerConfiguration((multiplier * hashType.GetAvgChunkSize()) / divider);

            Assert.Throws<NotImplementedException>(() => HashOfChunksInNodeMatchesChunkHashAlgorithmInner(expectedChunkCount, config, new ManagedChunker(config)));

            if (Chunker.IsComChunkerSupported &&
                config.AvgChunkSize == ChunkerConfiguration.SupportedComChunkerConfiguration.AvgChunkSize &&
                hashType == HashType.Dedup64K) // ComChunker supports no other chunk sizes.
            {
                Assert.Throws<NotImplementedException>(() => HashOfChunksInNodeMatchesChunkHashAlgorithmInner(expectedChunkCount, config, new ComChunker(config)));
            }
        }
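
For context, the InlineData row above deliberately yields a non-default chunk size, which is what the Assert.Throws calls expect to be rejected; the arithmetic, with values taken from the attribute:

            // Chunk-size arithmetic for the [InlineData] row above:
            int avgChunkSize = 1024 * 1024;               // Dedup1024K average chunk: 1024K
            int configured   = (64 * avgChunkSize) / 16;  // 4MB, not the 1024K default
            // Because the configured size differs from the hash type's default,
            // both chunkers are expected to throw NotImplementedException.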
Example #5
        /// <nodoc />
        public static ChunkerConfiguration GetChunkerConfiguration(this HashType hashType)
        {
            return hashType.IsValidDedup()
                ? new ChunkerConfiguration(hashType.GetAvgChunkSize())
                : throw new NotImplementedException($"Unsupported enum {hashType} of type {nameof(HashType)} encountered.");
        }
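
A short usage sketch of the extension, assuming the HashType values used throughout these examples:

        // Valid dedup hash types yield their matching configuration;
        // any other value surfaces the NotImplementedException above.
        ChunkerConfiguration cfg = HashType.Dedup64K.GetChunkerConfiguration();
        // cfg.AvgChunkSize == HashType.Dedup64K.GetAvgChunkSize()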