public void EnumerateItemsAllocatesChunks()
{
    // Job response containing three chunks (see Stubs for their blob layouts).
    var job = Stubs.BuildJobResponse(
        Stubs.Chunk1(null, false, false),
        Stubs.Chunk2(Stubs.NodeId2, true, false),
        Stubs.Chunk3(null, false, false)
    );

    // Per-node clients handed out by the factory; strict so unexpected calls fail.
    var clientForNode1 = new Mock<IDs3Client>(MockBehavior.Strict).Object;
    var clientForNode2 = new Mock<IDs3Client>(MockBehavior.Strict).Object;

    var factoryMock = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
    factoryMock.Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1)).Returns(clientForNode1);
    factoryMock.Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2)).Returns(clientForNode2);

    var clientMock = new Mock<IDs3Client>(MockBehavior.Strict);
    clientMock.Setup(c => c.BuildFactory(Stubs.Nodes)).Returns(factoryMock.Object);

    // Chunk 1 allocation sequence: retry-after, two successes, then chunk gone.
    clientMock
        .SetupSequence(c => c.AllocateJobChunk(Allocate(Stubs.ChunkId1)))
        .Returns(AllocateJobChunkResponse.RetryAfter(TimeSpan.FromMinutes(5)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk1(Stubs.NodeId1, false, false)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk1(Stubs.NodeId1, true, false)))
        .Returns(AllocateJobChunkResponse.ChunkGone);

    // Chunk 2 allocation sequence: two immediate successes.
    clientMock
        .SetupSequence(c => c.AllocateJobChunk(Allocate(Stubs.ChunkId2)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk2(Stubs.NodeId2, true, false)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk2(Stubs.NodeId2, true, true)));

    // Chunk 3 allocation sequence: two retry-afters before a success.
    clientMock
        .SetupSequence(c => c.AllocateJobChunk(Allocate(Stubs.ChunkId3)))
        .Returns(AllocateJobChunkResponse.RetryAfter(TimeSpan.FromMinutes(3)))
        .Returns(AllocateJobChunkResponse.RetryAfter(TimeSpan.FromMinutes(1)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk3(Stubs.NodeId2, false, false)));

    // The source reports requested sleeps through the callback; capture them.
    var recordedSleeps = new List<TimeSpan>();
    var transferSource = new WriteTransferItemSource(recordedSleeps.Add, clientMock.Object, job);

    // Drain the source in three passes and capture what each pass yields.
    var firstBatch = transferSource.EnumerateAvailableTransfers().Take(1).ToArray();
    var secondBatch = transferSource.EnumerateAvailableTransfers().Take(2).ToArray();
    var finalBatch = transferSource.EnumerateAvailableTransfers().ToArray();

    var comparer = new TransferItemSourceHelpers.TransferItemComparer();
    CollectionAssert.AreEqual(
        new[]
        {
            new TransferItem(clientForNode1, new Blob(Range.ByLength(0, 15), "bar")),
        },
        firstBatch,
        comparer
    );
    CollectionAssert.AreEqual(
        new[]
        {
            new TransferItem(clientForNode1, new Blob(Range.ByLength(10, 10), "foo")),
            new TransferItem(clientForNode2, new Blob(Range.ByLength(15, 20), "bar")),
        },
        secondBatch,
        comparer
    );
    CollectionAssert.AreEqual(
        new[]
        {
            new TransferItem(clientForNode2, new Blob(Range.ByLength(0, 10), "hello")),
            new TransferItem(clientForNode2, new Blob(Range.ByLength(35, 11), "bar")),
        },
        finalBatch,
        comparer
    );

    // The sleeps match the RetryAfter durations, in the order they were issued.
    CollectionAssert.AreEqual(
        new[]
        {
            TimeSpan.FromMinutes(5),
            TimeSpan.FromMinutes(3),
            TimeSpan.FromMinutes(1),
        },
        recordedSleeps
    );

    clientMock.VerifyAll();
    factoryMock.VerifyAll();
}
public void BasicWriteTransfer(long? maxBlobSize)
{
    // The bulk-put response reports three chunks (contents defined in Stubs).
    var bulkPutResponse = Stubs.BuildJobResponse(
        Stubs.Chunk1(null, false, false),
        Stubs.Chunk2(null, false, false),
        Stubs.Chunk3(null, false, false)
    );

    // Puts expected against node 1.
    var node1Mock = new Mock<IDs3Client>(MockBehavior.Strict);
    SetupPutObject(node1Mock, "hello", 0L, "ABCDefGHIJ");
    SetupPutObject(node1Mock, "bar", 35L, "zABCDEFGHIJ");

    // Puts expected against node 2.
    var node2Mock = new Mock<IDs3Client>(MockBehavior.Strict);
    SetupPutObject(node2Mock, "bar", 0L, "0123456789abcde");
    SetupPutObject(node2Mock, "foo", 10L, "klmnopqrst");
    SetupPutObject(node2Mock, "foo", 0L, "abcdefghij");
    SetupPutObject(node2Mock, "bar", 15L, "fghijklmnopqrstuvwxy");

    var factoryMock = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
    factoryMock
        .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
        .Returns(node1Mock.Object);
    factoryMock
        .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
        .Returns(node2Mock.Object);

    // Payload streams keyed by object name.
    var sourceStreams = new Dictionary<string, MockStream>
    {
        { "bar", new MockStream("0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ") },
        { "foo", new MockStream("abcdefghijklmnopqrst") },
        { "hello", new MockStream("ABCDefGHIJ") },
    };
    // Deliberately left as a deferred query: it is enumerated by both the
    // BulkPut setup below and StartWriteJob, matching the original behavior.
    var objectsToWrite = Stubs
        .ObjectNames
        .Select(name => new Ds3Object(name, sourceStreams[name].Length));

    var mainClientMock = new Mock<IDs3Client>(MockBehavior.Strict);
    mainClientMock
        .Setup(c => c.BuildFactory(Stubs.Nodes))
        .Returns(factoryMock.Object);
    mainClientMock
        .Setup(c => c.BulkPut(ItIsBulkPutRequest(Stubs.BucketName, objectsToWrite, maxBlobSize)))
        .Returns(bulkPutResponse);
    mainClientMock
        .Setup(c => c.AllocateJobChunk(ItIsAllocateRequest(Stubs.ChunkId1)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk1(Stubs.NodeId2, false, false)));
    mainClientMock
        .Setup(c => c.AllocateJobChunk(ItIsAllocateRequest(Stubs.ChunkId2)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk2(Stubs.NodeId2, false, false)));
    mainClientMock
        .Setup(c => c.AllocateJobChunk(ItIsAllocateRequest(Stubs.ChunkId3)))
        .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk3(Stubs.NodeId1, false, false)));

    var writeJob = new Ds3ClientHelpers(mainClientMock.Object)
        .StartWriteJob(Stubs.BucketName, objectsToWrite, maxBlobSize);

    // Capture progress notifications raised during the transfer.
    var transferredSizes = new ConcurrentQueue<long>();
    var completedItems = new ConcurrentQueue<string>();
    writeJob.DataTransferred += transferredSizes.Enqueue;
    writeJob.ItemCompleted += completedItems.Enqueue;

    writeJob.Transfer(key => sourceStreams[key]);

    node1Mock.VerifyAll();
    node2Mock.VerifyAll();
    factoryMock.VerifyAll();
    mainClientMock.VerifyAll();
    // Order of notifications is not deterministic, so assert equivalence only.
    CollectionAssert.AreEquivalent(new[] { 15L, 20L, 11L, 10L, 10L, 10L }, transferredSizes);
    CollectionAssert.AreEquivalent(Stubs.ObjectNames, completedItems);
}