Code Example #1
        public void TestListObjects()
        {
            var ds3ClientMock = new Mock<IDs3Client>(MockBehavior.Strict);

            ds3ClientMock
                .Setup(client => client.GetBucket(It.IsAny<GetBucketRequest>()))
                .Returns(new Queue<GetBucketResponse>(new[] {
                    CreateGetBucketResponse(
                        marker: "",
                        nextMarker: "baz",
                        isTruncated: true,
                        ds3objectInfos: new List<Ds3ObjectInfo> {
                            BuildDs3Object("foo", "2cde576e5f5a613e6cee466a681f4929", "2009-10-12T17:50:30.000Z", 12),
                            BuildDs3Object("bar", "f3f98ff00be128139332bcf4b772be43", "2009-10-14T17:50:31.000Z", 12)
                        }),
                    CreateGetBucketResponse(
                        marker: "baz",
                        nextMarker: "",
                        isTruncated: false,
                        ds3objectInfos: new List<Ds3ObjectInfo> {
                            BuildDs3Object("baz", "802d45fcb9a3f7d00f1481362edc0ec9", "2009-10-18T17:50:35.000Z", 12)
                        })
                }).Dequeue);

            var objects = new Ds3ClientHelpers(ds3ClientMock.Object).ListObjects("mybucket").ToList();

            Assert.AreEqual(3, objects.Count);
            CheckContents(objects[0], "foo", 12);
            CheckContents(objects[1], "bar", 12);
            CheckContents(objects[2], "baz", 12);
        }
Code Example #2
        static void Main(string[] args)
        {
            // Configure and build the core client.
            IDs3Client client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                    )
                ).Build();

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            string bucket    = "bucket-name";
            string directory = "TestData";

            // Creates a bucket if it does not already exist.
            helpers.EnsureBucketExists(bucket);

            // Creates a bulk job with the server based on the files in a directory (recursively).
            IJob job = helpers.StartWriteJob(bucket, FileHelpers.ListObjectsForDirectory(directory));

            // Keep the job id around. This is useful for job recovery in the case of a failure.
            Console.WriteLine("Job id {0} started.", job.JobId);

            // Transfer all of the files.
            job.Transfer(FileHelpers.BuildFilePutter(directory));

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
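The write job above runs without any progress output. The same IJob also exposes the DataTransferred and ItemCompleted events used in the test examples further down; a minimal, hypothetical sketch of console progress reporting (wired up before the job.Transfer call above) could be:

            // Sketch only: subscribe to the job's progress events before calling Transfer.
            // DataTransferred reports the size in bytes of each transferred blob;
            // ItemCompleted reports the name of each fully transferred object.
            job.DataTransferred += size => Console.WriteLine("Transferred a blob of {0} bytes.", size);
            job.ItemCompleted += name => Console.WriteLine("Completed object '{0}'.", name);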
Code Example #3
        public void TestGetBestEffort()
        {
            const string bucketName = "TestGetBestEffort";

            try
            {
                var helpers = new Ds3ClientHelpers(this._client);
                helpers.EnsureBucketExists(bucketName);

                //Upload data for the test
                //3 files: 1 with 3 blobs, 1 with 2 blobs and 1 with 1 blob
                const int blobSize = 10485760; //10MB blob size
                var putJob = helpers.StartWriteJob(bucketName, Utils.Objects, ds3WriteJobOptions: new Ds3WriteJobOptions { MaxUploadSize = blobSize });
                putJob.Transfer(Utils.ReadResource);

                //Getting the data back
                //1 blob will be missing from the 3-blob object
                var getJob = helpers.StartReadJob(bucketName, Utils.Objects);

                var dataTransfers = new ConcurrentQueue<long>();
                var itemsCompleted = new ConcurrentQueue<string>();
                getJob.DataTransferred += dataTransfers.Enqueue;
                getJob.ItemCompleted += itemsCompleted.Enqueue;

                Assert.Throws<AggregateException>(() => getJob.Transfer(s => new MemoryStream()));

                CollectionAssert.AreEquivalent(new[] { 10485760, 10485760, 10485760, 8027314, 8160373 }, dataTransfers); // the 7229224-byte blob is the one that throws
                CollectionAssert.AreEquivalent(Utils.Objects.Select(obj => obj.Name).Where(obj => !obj.Equals("3_blobs.txt")), itemsCompleted);
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }
Code Example #4
        static void Main(string[] args)
        {
            // Configure and build the core client.
            IDs3Client client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                )
            ).Build();

            string bucket = "bucket-name";

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            // Tracing example
            if (clientSwitch.TraceInfo) { Trace.WriteLine(string.Format("ListObjects from bucket = {0}", bucket)); }

            // Loop through all of the objects in the bucket.
            foreach (var obj in helpers.ListObjects("bucket-name"))
            {
                if (clientSwitch.TraceVerbose) { Trace.WriteLine(string.Format("Object '{0}' of size {1}.", obj.Name, obj.Size)); }
                Console.WriteLine("Object '{0}' of size {1}.", obj.Name, obj.Size);
            }

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
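Note that clientSwitch is referenced in this example (and in Code Examples #6 and #11) but never declared in the snippet. Presumably it is a System.Diagnostics.TraceSwitch defined at class level; an assumed declaration, with placeholder switch name and description, might look like:

        // Assumed declaration (not shown in the original example); the switch name and
        // description here are placeholders.
        private static readonly TraceSwitch clientSwitch =
            new TraceSwitch("Ds3Client", "Tracing for the DS3 client samples");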
Code Example #5
        static void Main(string[] args)
        {
            // Configure and build the core client.
            IDs3Client client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                    )
                ).Build();

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            string bucket    = "bucket-name";
            string directory = "DataFromBucket";

            // Creates a bulk job with all of the objects in the bucket.
            IJob job = helpers.StartReadAllJob(bucket);

            // Same as: IJob job = helpers.StartReadJob(bucket, helpers.ListObjects(bucket));

            // Keep the job id around. This is useful for job recovery in the case of a failure.
            Console.WriteLine("Job id {0} started.", job.JobId);

            // Transfer all of the files.
            job.Transfer(FileHelpers.BuildFileGetter(directory));

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
Code Example #6
File: BulkGetExample.cs  Project: GtJosh/ds3_net_sdk
        static void Main(string[] args)
        {
            // Configure and build the core client.
            IDs3Client client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                )
            ).Build();

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            string bucket = "bucket-name";
            string directory = "DataFromBucket";

            // Creates a bulk job with all of the objects in the bucket.
            IJob job = helpers.StartReadAllJob(bucket);
            // Same as: IJob job = helpers.StartReadJob(bucket, helpers.ListObjects(bucket));

            // Keep the job id around. This is useful for job recovery in the case of a failure.
            Console.WriteLine("Job id {0} started.", job.JobId);

            // Tracing example 
            if (clientSwitch.TraceInfo) { Trace.WriteLine(string.Format("StartReadAllJob({0})", bucket)); }
            if (clientSwitch.TraceVerbose) { Trace.WriteLine(string.Format("dd files from: {0}", directory)); }

            // Transfer all of the files.
            job.Transfer(FileHelpers.BuildFileGetter(directory));

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
Code Example #7
        public void Startup()
        {

            _client = Ds3TestUtils.CreateClient();
            _helpers = new Ds3ClientHelpers(_client);

            SetupTestData();
        }
Code Example #8
        public static void DeleteBucket(IDs3Client client, string bucketName)
        {
            if (client.HeadBucket(new HeadBucketRequest(bucketName)).Status == HeadBucketResponse.StatusType.DoesntExist)
            {
                return;
            }

            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            var objs = helpers.ListObjects(bucketName);

            client.DeleteObjectList(new DeleteObjectListRequest(bucketName, objs));
            client.DeleteBucket(new DeleteBucketRequest(bucketName));
        }
Code Example #9
File: Ds3TestUtils.cs  Project: GtJosh/ds3_net_sdk
        internal static string GetSingleObjectWithRange(IDs3Client client, string bucketName, string objectName, Range range)
        {
            string tempFilename = Path.GetTempFileName();

            using (Stream fileStream = new FileStream(tempFilename, FileMode.Truncate, FileAccess.Write))
            {

                IDs3ClientHelpers helper = new Ds3ClientHelpers(client);

                var job = helper.StartPartialReadJob(bucketName, new List<string>(), new List<Ds3PartialObject> { new Ds3PartialObject(range, objectName) });
                
                job.Transfer(key => fileStream);

                return tempFilename;
            }
        }
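For reference, a hypothetical call to this helper, assuming client is an IDs3Client built as in the earlier examples and using placeholder bucket and object names, might look like:

            // Sketch only: fetch the first kilobyte of "beowulf.txt" into a temporary file.
            var path = Ds3TestUtils.GetSingleObjectWithRange(client, "mybucket", "beowulf.txt", Range.ByLength(0L, 1024L));
            Console.WriteLine("Partial object written to {0}", path);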
Code Example #10
File: Ds3TestUtils.cs  Project: GtJosh/ds3_net_sdk
        /// <summary>
        /// This will get the object and return the name of the temporary file it was written to.
        /// It is up to the caller to delete the temporary file
        /// </summary>
        public static string GetSingleObject(IDs3Client client, string bucketName, string objectName, int retries = 5)
        {
            string tempFilename = Path.GetTempFileName();

            using (Stream fileStream = new FileStream(tempFilename, FileMode.Truncate, FileAccess.Write))
            {
                
                IDs3ClientHelpers helper = new Ds3ClientHelpers(client, getObjectRetries: retries);

                var job = helper.StartReadJob(bucketName, new List<Ds3Object>{ new Ds3Object(objectName, null)});
            
                job.Transfer(key => fileStream);

                return tempFilename;   
            }
        }
Code Example #11
File: BulkPutExample.cs  Project: rpmoore/ds3_net_sdk
        static void Main(string[] args)
        {
            // Configure and build the core client.
            IDs3Client client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                )
            ).Build();

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            string bucket = "bucket-name";
            string directory = "TestData";

            // Creates a bucket if it does not already exist.
            helpers.EnsureBucketExists(bucket);
            if (clientSwitch.TraceVerbose) { Trace.WriteLine(string.Format("Bucket exists: {0}", bucket)); }

            // Creates a bulk job with the server based on the files in a directory (recursively).
            IJob job = helpers.StartWriteJob(bucket, FileHelpers.ListObjectsForDirectory(directory));

            // Tracing example
            if (clientSwitch.TraceInfo) { Trace.WriteLine(string.Format("StartWriteJob({0})", bucket)); }
            if (clientSwitch.TraceVerbose) { Trace.WriteLine(string.Format("Add files from: {0}", directory)); }

            // Keep the job id around. This is useful for job recovery in the case of a failure.
            Console.WriteLine("Job id {0} started.", job.JobId);

            // Transfer all of the files.
            job.Transfer(FileHelpers.BuildFilePutter(directory));

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
Code Example #12
        static void Main(string[] args)
        {
            // Configure and build the core client.
            IDs3Client client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                    )
                ).Build();

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            // Loop through all of the objects in the bucket.
            foreach (var obj in helpers.ListObjects("bucket-name"))
            {
                Console.WriteLine("Object '{0}' of size {1}.", obj.Name, obj.Size);
            }

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
Code Example #13
        public void PartialObjectReturn()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.ReadFailureChunk(null, false)
            );
            var availableJobResponse = Stubs.BuildJobResponse(

                Stubs.ReadFailureChunk(Stubs.NodeId1, true)
            );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObjectWithContentLengthMismatchException(node1Client, "bar", 0L, "ABCDEFGHIJ", 20L, 10L); // The initial request is for all 20 bytes, but only the first 10 will be sent
            MockHelpers.SetupGetObject(node1Client, "bar", 0L, "JLMNOPQRSTU", Range.ByPosition(9L, 19L));  // The client will request the full last byte based off of when the client fails

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.GetBulkJobSpectraS3(MockHelpers.ItIsBulkGetRequest(
                    Stubs.BucketName,
                    null,
                    Stubs.ObjectNames,
                    Enumerable.Empty<Ds3PartialObject>()
                )))
                .Returns(new GetBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.GetJobChunksReadyForClientProcessingSpectraS3(MockHelpers.ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
                .Returns(GetJobChunksReadyForClientProcessingSpectraS3Response.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
            );

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<string>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            var streams = new ConcurrentDictionary<string, MockStream>();
            job.Transfer(key => streams.GetOrAdd(key, k => new MockStream()));

            node1Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            // Since we are using a mock for the underlying client, the first request does not write any content to the stream
            CollectionAssert.AreEqual(
                new[]
                {
                    new { Key = "bar", Value = "ABCDEFGHIJLMNOPQRSTU" },
                },
                from item in streams
                orderby item.Key
                select new { item.Key, Value = MockHelpers.Encoding.GetString(item.Value.Result) }
            );
            CollectionAssert.AreEquivalent(new[] { 20L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.PartialFailureObjectNames, itemsCompleted);
        }
Code Example #14
File: Ds3TestUtils.cs  Project: GtJosh/ds3_net_sdk
        public static void PutFiles(IDs3Client client, string bucketName, IEnumerable<Ds3Object> files,
            Func<string, Stream> createStreamForTransferItem)
        {

            IDs3ClientHelpers helper = new Ds3ClientHelpers(client);

            helper.EnsureBucketExists(bucketName);

            var job = helper.StartWriteJob(bucketName, files);

            job.Transfer(createStreamForTransferItem);

        }
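A hypothetical call to this helper, assuming client is an IDs3Client built as in the earlier examples and using a placeholder bucket name, could upload a single small object from an in-memory stream:

            // Sketch only: put one small object whose content comes from a MemoryStream.
            var contentBytes = System.Text.Encoding.UTF8.GetBytes("hi im content");
            var files = new List<Ds3Object> { new Ds3Object("obj1", contentBytes.Length) };
            Ds3TestUtils.PutFiles(client, "mybucket", files, key => new MemoryStream(contentBytes));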
Code Example #15
        public void BasicWriteTransfer(long? maxBlobSize)
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            MockHelpers.SetupPutObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node2Client, "bar", 0L, "0123456789abcde");
            MockHelpers.SetupPutObject(node2Client, "foo", 10L, "klmnopqrst");
            MockHelpers.SetupPutObject(node2Client, "foo", 0L, "abcdefghij");
            MockHelpers.SetupPutObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy");

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var streams = new Dictionary<string, string>
            {
                { "bar", "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ" },
                { "foo", "abcdefghijklmnopqrst" },
                { "hello", "ABCDefGHIJ" }
            };
            var ds3Objects = Stubs
                .ObjectNames
                .Select(name => new Ds3Object(name, streams[name].Length));

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.PutBulkJobSpectraS3(MockHelpers.ItIsBulkPutRequest(Stubs.BucketName, ds3Objects, maxBlobSize)))
                .Returns(new PutBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId1)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk1(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId2)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk2(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId3)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk3(Stubs.NodeId1, false, false)));

            var job = new Ds3ClientHelpers(client.Object).StartWriteJob(Stubs.BucketName, ds3Objects, ds3WriteJobOptions: new Ds3WriteJobOptions { MaxUploadSize = maxBlobSize });

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<string>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            job.Transfer(key => new MockStream(streams[key]));

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEquivalent(new[] { 15L, 20L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames, itemsCompleted);
        }
Code Example #16
        public void WithMetadataBeforeTransferException()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                            Stubs.Chunk1(null, false, false),
                            Stubs.Chunk2(null, false, false),
                            Stubs.Chunk3(null, false, false)
                        );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            MockHelpers.SetupPutObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node2Client, "bar", 0L, "0123456789abcde");
            MockHelpers.SetupPutObject(node2Client, "foo", 10L, "klmnopqrst");
            MockHelpers.SetupPutObject(node2Client, "foo", 0L, "abcdefghij");
            MockHelpers.SetupPutObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy");

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var streams = new Dictionary<string, string>
            {
                { "bar", "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ" },
                { "foo", "abcdefghijklmnopqrst" },
                { "hello", "ABCDefGHIJ" }
            };
            var ds3Objects = Stubs
                .ObjectNames
                .Select(name => new Ds3Object(name, streams[name].Length));

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.PutBulkJobSpectraS3(MockHelpers.ItIsBulkPutRequest(Stubs.BucketName, ds3Objects, null)))
                .Returns(new PutBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId1)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk1(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId2)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk2(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId3)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk3(Stubs.NodeId1, false, false)));

            var job = new Ds3ClientHelpers(client.Object).StartWriteJob(Stubs.BucketName, ds3Objects);

            job.Transfer(key => new MockStream(streams[key]));

            // WithMetadata must always be called before the Transfer method;
            // calling it after Transfer throws a Ds3AssertException.
            Assert.Throws<Ds3AssertException>(() => job.WithMetadata(null));
        }
Code Example #17
File: DataIntegrity.cs  Project: GtJosh/ds3_net_sdk
        public void TestEventEmiter()
        {
            const string bucketName = "eventEmitter";

            try
            {
                var counter = 0;
                Ds3TestUtils.LoadTestData(_client, bucketName);

                var ds3ObjList = new List<Ds3Object> 
                {
                    new Ds3Object("beowulf.txt", null)
                };

                var helpers = new Ds3ClientHelpers(_client);

                var job = helpers.StartReadJob(bucketName, ds3ObjList);

                job.ItemCompleted += item =>
                {
                    Console.WriteLine(@"Got completed event for " + item);
                    counter++;
                };
                job.Transfer(name => Stream.Null);

                Assert.AreEqual(1, counter);
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }
Code Example #18
        public void TestJobOnFailureEvent()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk2(null, false, false)
            );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk2(Stubs.NodeId2, true, true)
            );

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node2Client, "foo", 10L, "abcdefghij");
            node2Client
                .Setup(c => c.GetObject(MockHelpers.ItIsGetObjectRequest(
                    Stubs.BucketName,
                    "bar",
                    Stubs.JobId,
                    15L,
                    Enumerable.Empty<Range>()
                )))
                .Throws<NullReferenceException>();

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.GetBulkJobSpectraS3(MockHelpers.ItIsBulkGetRequest(
                    Stubs.BucketName,
                    null,
                    Stubs.ObjectNames,
                    Enumerable.Empty<Ds3PartialObject>()
                )))
                .Returns(new GetBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.GetJobChunksReadyForClientProcessingSpectraS3(MockHelpers.ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
                .Returns(GetJobChunksReadyForClientProcessingSpectraS3Response.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
            );

            var dataTransfers = new ConcurrentQueue<long>();
            job.DataTransferred += dataTransfers.Enqueue;

            job.OnFailure += (fileName, offset, exception) =>
            {
                Assert.AreEqual("bar", fileName);
                Assert.AreEqual(15, offset);
                Assert.AreEqual("Object reference not set to an instance of an object.", exception.Message);
            };

            try
            {
                job.Transfer(key => new MockStream());
                Assert.Fail("Should have thrown an exception.");
            }
            catch (AggregateException e)
            {
                Assert.IsInstanceOf<NullReferenceException>(e.InnerException);
            }

            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEquivalent(new[] { 10L }, dataTransfers);
        }
Code Example #19
        public void BasicReadTransfer()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(Stubs.NodeId2, true, true),
                Stubs.Chunk2(Stubs.NodeId2, true, true),
                Stubs.Chunk3(Stubs.NodeId1, true, true)
            );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            MockHelpers.SetupGetObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node2Client, "bar", 0L, "0123456789abcde");
            MockHelpers.SetupGetObject(node2Client, "foo", 10L, "klmnopqrst");
            MockHelpers.SetupGetObject(node2Client, "foo", 0L, "abcdefghij");
            MockHelpers.SetupGetObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy");

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.GetBulkJobSpectraS3(MockHelpers.ItIsBulkGetRequest(
                    Stubs.BucketName,
                    null,
                    Stubs.ObjectNames,
                    Enumerable.Empty<Ds3PartialObject>()
                )))
                .Returns(new GetBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.GetJobChunksReadyForClientProcessingSpectraS3(MockHelpers.ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
                .Returns(GetJobChunksReadyForClientProcessingSpectraS3Response.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
            );

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<string>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            var streams = new ConcurrentDictionary<string, MockStream>();
            job.Transfer(key => streams.GetOrAdd(key, k => new MockStream()));

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEqual(
                new[]
                {
                    new { Key = "bar", Value = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ" },
                    new { Key = "foo", Value = "abcdefghijklmnopqrst" },
                    new { Key = "hello", Value = "ABCDefGHIJ" },
                },
                from item in streams
                orderby item.Key
                select new { item.Key, Value = MockHelpers.Encoding.GetString(item.Value.Result) }
            );
            CollectionAssert.AreEquivalent(new[] { 15L, 20L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames, itemsCompleted);
        }
Code Example #20
        /// <summary>
        /// This will get the object and return the name of the temporary file it was written to.
        /// It is up to the caller to delete the temporary file
        /// </summary>
        public static string GetSingleObject(IDs3Client client, string bucketName, string objectName, int retries = 5,
            IHelperStrategy<string> helperStrategy = null)
        {
            var tempFilename = Path.GetTempFileName();

            using (Stream fileStream = new FileStream(tempFilename, FileMode.Truncate, FileAccess.Write))
            {
                IDs3ClientHelpers helper = new Ds3ClientHelpers(client, objectTransferAttempts: retries);

                if (helperStrategy == null)
                {
                    helperStrategy = new ReadRandomAccessHelperStrategy<string>();
                }

                var job = helper.StartReadJob(bucketName, new List<Ds3Object> {new Ds3Object(objectName, null)},
                    helperStrategy: helperStrategy);

                job.Transfer(key => fileStream);

                return tempFilename;
            }
        }
Code Example #21
        public void BasicWriteTransfer(long? maxBlobSize)
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
                );

            var node1Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupPutObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            SetupPutObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupPutObject(node2Client, "bar", 0L, "0123456789abcde");
            SetupPutObject(node2Client, "foo", 10L, "klmnopqrst");
            SetupPutObject(node2Client, "foo", 0L, "abcdefghij");
            SetupPutObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy");

            var clientFactory = new Mock <IDs3ClientFactory>(MockBehavior.Strict);

            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
            .Returns(node1Client.Object);
            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
            .Returns(node2Client.Object);

            var streams = new Dictionary <string, MockStream>
            {
                { "bar", new MockStream("0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ") },
                { "foo", new MockStream("abcdefghijklmnopqrst") },
                { "hello", new MockStream("ABCDefGHIJ") },
            };
            var ds3Objects = Stubs
                             .ObjectNames
                             .Select(name => new Ds3Object(name, streams[name].Length));

            var client = new Mock <IDs3Client>(MockBehavior.Strict);

            client
            .Setup(c => c.BuildFactory(Stubs.Nodes))
            .Returns(clientFactory.Object);
            client
            .Setup(c => c.BulkPut(ItIsBulkPutRequest(Stubs.BucketName, ds3Objects, maxBlobSize)))
            .Returns(initialJobResponse);
            client
            .Setup(c => c.AllocateJobChunk(ItIsAllocateRequest(Stubs.ChunkId1)))
            .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk1(Stubs.NodeId2, false, false)));
            client
            .Setup(c => c.AllocateJobChunk(ItIsAllocateRequest(Stubs.ChunkId2)))
            .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk2(Stubs.NodeId2, false, false)));
            client
            .Setup(c => c.AllocateJobChunk(ItIsAllocateRequest(Stubs.ChunkId3)))
            .Returns(AllocateJobChunkResponse.Success(Stubs.Chunk3(Stubs.NodeId1, false, false)));

            var job = new Ds3ClientHelpers(client.Object).StartWriteJob(Stubs.BucketName, ds3Objects, maxBlobSize);

            var dataTransfers  = new ConcurrentQueue <long>();
            var itemsCompleted = new ConcurrentQueue <string>();

            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted   += itemsCompleted.Enqueue;

            job.Transfer(key => streams[key]);

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEquivalent(new[] { 15L, 20L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames, itemsCompleted);
        }
Code Example #22
File: DataIntegrity.cs  Project: rpmoore/ds3_net_sdk
        public void TestJobEvents()
        {
            const string bucketName = "TestJobEvents";

            try
            {
                Ds3TestUtils.LoadTestData(_client, bucketName);

                var ds3ObjList = new List<Ds3Object>
                {
                    new Ds3Object("beowulf.txt", null)
                };

                var helpers = new Ds3ClientHelpers(_client);

                Ds3TestUtils.UsingAllStringReadStrategies(strategy =>
                {
                    var counter = 0;
                    var dataTransfered = 0L;

                    var job = helpers.StartReadJob(bucketName, ds3ObjList, strategy);

                    job.ItemCompleted += item =>
                    {
                        counter++;
                    };

                    job.DataTransferred += item =>
                    {
                        dataTransfered += item;
                    };

                    job.Transfer(name => Stream.Null);

                    Assert.AreEqual(1, counter);
                    Assert.AreEqual(294059, dataTransfered);
                });
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }
Code Example #23
        public void TestWithRetransmitFailingPutBlobs()
        {
            const string bucketName = "TestWithRetransmitFailingPutBlobs";
            try
            {
                var helpers = new Ds3ClientHelpers(this._client, objectTransferAttempts:4);
                helpers.EnsureBucketExists(bucketName);
                const string content = "hi im content";
                var contentBytes = System.Text.Encoding.UTF8.GetBytes(content);

                var stream = new MemoryStream(contentBytes);

                var objects = new List<Ds3Object>
                {
                    new Ds3Object("obj1", contentBytes.Length)
                };

                var job = helpers.StartWriteJob(bucketName, objects);

                job.Transfer(s => stream);
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }
Code Example #24
        public void startup()
        {
            _endpoint = Environment.GetEnvironmentVariable("DS3_ENDPOINT");
            string accesskey = Environment.GetEnvironmentVariable("DS3_ACCESS_KEY");
            string secretkey = Environment.GetEnvironmentVariable("DS3_SECRET_KEY");
            _proxy = Environment.GetEnvironmentVariable("http_proxy");
            _credentials = new Credentials(accesskey, secretkey);
            Ds3Builder builder = new Ds3Builder(_endpoint, _credentials);
            if (!string.IsNullOrEmpty(_proxy))
            {
                builder.WithProxy(new Uri(_proxy));
            }
            _client = builder.Build();
            _helpers = new Ds3ClientHelpers(_client);

            setupTestData();
        }
Code Example #25
        public void ReadTransferFailsUponTransferrerException()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk2(Stubs.NodeId2, true, true)
            );

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node2Client, "foo", 0L, "abcdefghij");
            node2Client
                .Setup(c => c.GetObject(MockHelpers.ItIsGetObjectRequest(
                    Stubs.BucketName,
                    "bar",
                    Stubs.JobId,
                    15L,
                    Enumerable.Empty<Range>()
                )))
                .Throws<NullReferenceException>();

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.BulkGet(MockHelpers.ItIsBulkGetRequest(
                    Stubs.BucketName,
                    ChunkOrdering.None,
                    Stubs.ObjectNames,
                    Enumerable.Empty<Ds3PartialObject>()
                )))
                .Returns(initialJobResponse);
            client
                .Setup(c => c.GetAvailableJobChunks(MockHelpers.ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
                .Returns(GetAvailableJobChunksResponse.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
            );
            try
            {
                job.Transfer(key => new MockStream());
                Assert.Fail("Should have thrown an exception.");
            }
            catch (AggregateException e)
            {
                Assert.IsInstanceOf<NullReferenceException>(e.InnerException);
            }
        }
Code Example #26
        public void ReadTransferFailsUponTransferrerException()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
                );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk2(Stubs.NodeId2, true, true)
                );

            var node2Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupGetObject(node2Client, "foo", 0L, "abcdefghij");
            node2Client
            .Setup(c => c.GetObject(ItIsGetObjectRequest(
                                        Stubs.BucketName,
                                        "bar",
                                        Stubs.JobId,
                                        15L,
                                        Enumerable.Empty <Range>()
                                        )))
            .Throws <NullReferenceException>();

            var clientFactory = new Mock <IDs3ClientFactory>(MockBehavior.Strict);

            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
            .Returns(node2Client.Object);

            var client = new Mock <IDs3Client>(MockBehavior.Strict);

            client
            .Setup(c => c.BuildFactory(Stubs.Nodes))
            .Returns(clientFactory.Object);
            client
            .Setup(c => c.BulkGet(ItIsBulkGetRequest(
                                      Stubs.BucketName,
                                      ChunkOrdering.None,
                                      Stubs.ObjectNames,
                                      Enumerable.Empty <Ds3PartialObject>()
                                      )))
            .Returns(initialJobResponse);
            client
            .Setup(c => c.GetAvailableJobChunks(ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
            .Returns(GetAvailableJobChunksResponse.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
                );

            try
            {
                job.Transfer(key => new MockStream());
                Assert.Fail("Should have thrown an exception.");
            }
            catch (AggregateException e)
            {
                Assert.IsInstanceOf <NullReferenceException>(e.InnerException);
            }
        }
Code Example #27
        public void PartialReadTransfer()
        {
            var partialObjects = new[]
            {
                new Ds3PartialObject(Range.ByLength(0L, 4L), "foo"),
                new Ds3PartialObject(Range.ByLength(6L, 10L), "foo"),
                new Ds3PartialObject(Range.ByLength(18L, 1L), "foo"),
                new Ds3PartialObject(Range.ByLength(10L, 26L), "bar"),
            };
            var fullObjects = new[] { "hello" };

            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(Stubs.NodeId2, true, true),
                Stubs.Chunk2(Stubs.NodeId2, true, true),
                Stubs.Chunk3(Stubs.NodeId1, true, true)
            );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node1Client, "hello", 0L, "ABCDefGHIJ", Range.ByLength(0L, 10L));
            MockHelpers.SetupGetObject(node1Client, "bar", 35L, "z", Range.ByLength(35L, 1L));

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node2Client, "bar", 0L, "abcde", Range.ByLength(10L, 5L));
            MockHelpers.SetupGetObject(node2Client, "foo", 10L, "klmnop!", Range.ByLength(10L, 6L), Range.ByLength(18L, 1L));
            MockHelpers.SetupGetObject(node2Client, "foo", 0L, "abcdghij", Range.ByLength(0L, 4L), Range.ByLength(6L, 4L));
            MockHelpers.SetupGetObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy", Range.ByLength(15L, 20L));

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.GetBulkJobSpectraS3(MockHelpers.ItIsBulkGetRequest(
                    Stubs.BucketName,
                    null,
                    fullObjects,
                    partialObjects
                )))
                .Returns(new GetBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.GetJobChunksReadyForClientProcessingSpectraS3(MockHelpers.ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
                .Returns(GetJobChunksReadyForClientProcessingSpectraS3Response.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object)
                .StartPartialReadJob(Stubs.BucketName, fullObjects, partialObjects);
            CollectionAssert.AreEquivalent(
                partialObjects.Concat(new[] { new Ds3PartialObject(Range.ByLength(0L, 10L), "hello") }),
                job.AllItems
            );

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<Ds3PartialObject>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            var streams = new ConcurrentDictionary<Ds3PartialObject, MockStream>();
            job.Transfer(key => streams.GetOrAdd(key, k => new MockStream()));

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            var fullObjectPart = new Ds3PartialObject(Range.ByLength(0L, 10L), fullObjects[0]);
            CollectionAssert.AreEqual(
                new[]
                {
                    new { Key = partialObjects[0], Value = "abcd" },
                    new { Key = partialObjects[1], Value = "ghijklmnop" },
                    new { Key = partialObjects[2], Value = "!" },
                    new { Key = partialObjects[3], Value = "abcdefghijklmnopqrstuvwxyz" },
                    new { Key = fullObjectPart, Value = "ABCDefGHIJ" },
                }.OrderBy(it => it.Key).ToArray(),
                (
                    from item in streams
                    orderby item.Key
                    select new { item.Key, Value = MockHelpers.Encoding.GetString(item.Value.Result) }
                ).ToArray()
            );
            CollectionAssert.AreEquivalent(
                new[] { 1L, 1L, 4L, 4L, 5L, 6L, 10L, 20L },
                dataTransfers.Sorted().ToArray()
            );
            CollectionAssert.AreEquivalent(partialObjects.Concat(new[] { fullObjectPart }), itemsCompleted);
        }
Code Example #28
        public void PartialReadTransfer()
        {
            var partialObjects = new[]
            {
                new Ds3PartialObject(Range.ByLength(0L, 4L), "foo"),
                new Ds3PartialObject(Range.ByLength(6L, 10L), "foo"),
                new Ds3PartialObject(Range.ByLength(18L, 1L), "foo"),
                new Ds3PartialObject(Range.ByLength(10L, 26L), "bar"),
            };
            var fullObjects = new[] { "hello" };

            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
                );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(Stubs.NodeId2, true, true),
                Stubs.Chunk2(Stubs.NodeId2, true, true),
                Stubs.Chunk3(Stubs.NodeId1, true, true)
                );

            var node1Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupGetObject(node1Client, "hello", 0L, "ABCDefGHIJ", Range.ByLength(0L, 10L));
            SetupGetObject(node1Client, "bar", 35L, "z", Range.ByLength(35L, 1L));

            var node2Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupGetObject(node2Client, "bar", 0L, "abcde", Range.ByLength(10L, 5L));
            SetupGetObject(node2Client, "foo", 10L, "klmnop!", Range.ByLength(10L, 6L), Range.ByLength(18L, 1L));
            SetupGetObject(node2Client, "foo", 0L, "abcdghij", Range.ByLength(0L, 4L), Range.ByLength(6L, 4L));
            SetupGetObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy", Range.ByLength(15L, 20L));

            var clientFactory = new Mock <IDs3ClientFactory>(MockBehavior.Strict);

            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
            .Returns(node1Client.Object);
            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
            .Returns(node2Client.Object);

            var client = new Mock <IDs3Client>(MockBehavior.Strict);

            client
            .Setup(c => c.BuildFactory(Stubs.Nodes))
            .Returns(clientFactory.Object);
            client
            .Setup(c => c.BulkGet(ItIsBulkGetRequest(
                                      Stubs.BucketName,
                                      ChunkOrdering.None,
                                      fullObjects,
                                      partialObjects
                                      )))
            .Returns(initialJobResponse);
            client
            .Setup(c => c.GetAvailableJobChunks(ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
            .Returns(GetAvailableJobChunksResponse.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object)
                      .StartPartialReadJob(Stubs.BucketName, fullObjects, partialObjects);

            CollectionAssert.AreEquivalent(
                partialObjects.Concat(new[] { new Ds3PartialObject(Range.ByLength(0L, 10L), "hello") }),
                job.AllItems
                );

            var dataTransfers  = new ConcurrentQueue <long>();
            var itemsCompleted = new ConcurrentQueue <Ds3PartialObject>();

            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted   += itemsCompleted.Enqueue;

            var streams = new ConcurrentDictionary <Ds3PartialObject, MockStream>();

            job.Transfer(key => streams.GetOrAdd(key, k => new MockStream()));

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            var fullObjectPart = new Ds3PartialObject(Range.ByLength(0L, 10L), fullObjects[0]);

            CollectionAssert.AreEqual(
                new[]
                {
                    new { Key = partialObjects[0], Value = "abcd" },
                    new { Key = partialObjects[1], Value = "ghijklmnop" },
                    new { Key = partialObjects[2], Value = "!" },
                    new { Key = partialObjects[3], Value = "abcdefghijklmnopqrstuvwxyz" },
                    new { Key = fullObjectPart, Value = "ABCDefGHIJ" },
                }.OrderBy(it => it.Key).ToArray(),
                (
                    from item in streams
                    orderby item.Key
                    select new { item.Key, Value = _encoding.GetString(item.Value.Result) }
                ).ToArray()
            );
            CollectionAssert.AreEquivalent(
                new[] { 1L, 1L, 4L, 4L, 5L, 6L, 10L, 20L },
                dataTransfers.Sorted().ToArray()
                );
            CollectionAssert.AreEquivalent(partialObjects.Concat(new[] { fullObjectPart }), itemsCompleted);
        }
Code Example #29
        public void ReadTransferFailsUponTransferStrategyException()
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(Stubs.NodeId2, true, true),
                Stubs.Chunk2(Stubs.NodeId2, true, true),
                Stubs.Chunk3(Stubs.NodeId2, true, true)
            );

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupGetObject(node2Client, "foo", 0L, "abcdefghij");
            MockHelpers.SetupGetObject(node2Client, "foo", 10L, "klmnopqrst");

            MockHelpers.SetupGetObject(node2Client, "hello", 0L, "ABCDefGHIJ");

            MockHelpers.SetupGetObject(node2Client, "bar", 0L, "0123456789abcde");
            MockHelpers.SetupGetObject(node2Client, "bar", 35L, "zABCDEFGHIJ");

            node2Client
                .Setup(c => c.GetObject(MockHelpers.ItIsGetObjectRequest(
                    Stubs.BucketName,
                    "bar",
                    Stubs.JobId,
                    15L,
                    Enumerable.Empty<Range>()
                )))
                .Throws<NullReferenceException>();

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.GetBulkJobSpectraS3(MockHelpers.ItIsBulkGetRequest(
                    Stubs.BucketName,
                    null,
                    Stubs.ObjectNames,
                    Enumerable.Empty<Ds3PartialObject>()
                )))
                .Returns(new GetBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.GetJobChunksReadyForClientProcessingSpectraS3(MockHelpers.ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
                .Returns(GetJobChunksReadyForClientProcessingSpectraS3Response.Success(TimeSpan.FromMinutes(0), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
            );

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<string>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            try
            {
                job.Transfer(key => new MockStream());
                Assert.Fail("Should have thrown an exception.");
            }
            catch (AggregateException e)
            {
                Assert.IsInstanceOf<NullReferenceException>(e.InnerException);
            }

            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEquivalent(new[] { 15L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames.Where(obj => !obj.Equals("bar")), itemsCompleted);
        }
Code example #30
        public static void LoadTestData(IDs3Client client, string bucketName)
        {
            IDs3ClientHelpers helper = new Ds3ClientHelpers(client);

            helper.EnsureBucketExists(bucketName);

            var job = helper.StartWriteJob(bucketName, Objects);

            job.Transfer(key => ReadResource(key));
        }
Code example #31
        public void TestListObjects()
        {
            var ds3ClientMock = new Mock<IDs3Client>(MockBehavior.Strict);
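            // GetBucket is paginated: the first stubbed response is truncated with nextMarker "baz";
            // the second response completes the listing.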
            ds3ClientMock
                .Setup(client => client.GetBucket(It.IsAny<GetBucketRequest>()))
                .Returns(new Queue<GetBucketResponse>(new[] {
                    MockHelpers.CreateGetBucketResponse(
                        marker: "",
                        nextMarker: "baz",
                        isTruncated: true,
                        ds3ObjectInfos: new List<Contents> {
                            MockHelpers.BuildDs3Object(
                                "foo",
                                "2cde576e-5f5a-613e-6cee-466a681f4929",
                                "2009-10-12T17:50:30.000Z",
                                12),
                            MockHelpers.BuildDs3Object(
                                "bar",
                                "f3f98ff0-0be1-2813-9332-bcf4b772be43",
                                "2009-10-14T17:50:31.000Z",
                                12)
                        }
                    ),
                    MockHelpers.CreateGetBucketResponse(
                        marker: "baz",
                        nextMarker: "",
                        isTruncated: false,
                        ds3ObjectInfos: new List<Contents> {
                            MockHelpers.BuildDs3Object("baz", "802d45fc-b9a3-f7d0-0f14-81362edc0ec9", "2009-10-18T17:50:35.000Z", 12)
                        }
                    )
                }).Dequeue);

            var objects = new Ds3ClientHelpers(ds3ClientMock.Object).ListObjects("mybucket").ToList();

            Assert.AreEqual(3, objects.Count);
            MockHelpers.CheckContents(objects[0], "foo", 12);
            MockHelpers.CheckContents(objects[1], "bar", 12);
            MockHelpers.CheckContents(objects[2], "baz", 12);
        }
Code example #32
        public void Startup()
        {
            try
            {
                this._client = Ds3TestUtils.CreateClient(this._copyBufferSize);
                this._helpers = new Ds3ClientHelpers(this._client);

                var dataPolicyId = TempStorageUtil.SetupDataPolicy(FixtureName, false, ChecksumType.Type.MD5, _client);
                _envStorageIds = TempStorageUtil.Setup(FixtureName, dataPolicyId, _client);
            }
            catch (Exception)
            {
                // So long as any SetUp method runs without error, the TearDown method is guaranteed to run.
                // It will not run if a SetUp method fails or throws an exception.
                Teardown();
                throw;
            }
        }
Code example #33
        public void TestPutJobWithRetransmit2Blobs1Fail()
        {
            var initialJobResponse = Stubs.BuildPutJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            MockHelpers.SetupPutObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node2Client, "bar", 0L, "0123456789abcde");
            MockHelpers.SetupPutObject(node2Client, "foo", 10L, "klmnopqrst");
            MockHelpers.SetupPutObject(node2Client, "foo", 0L, "abcdefghij");
            MockHelpers.SetupPutObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy");

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var streams = new Dictionary<string, string>
            {
                { "bar", "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ" },
                { "foo", "abcdefghijklmnopqrst" },
                { "hello", "ABCDefGHIJ" }
            };
            var ds3Objects = Stubs
                .ObjectNames
                .Select(name => new Ds3Object(name, streams[name].Length));

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.PutBulkJobSpectraS3(MockHelpers.ItIsBulkPutRequest(Stubs.BucketName, ds3Objects, null)))
                .Returns(new PutBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId1)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk1(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId2)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk2(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId3)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk3(Stubs.NodeId1, false, false)));

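            // Simulate transient PutObject failures: the "hello" blob fails twice on node 1 and
            // the first "bar" blob fails three times on node 2, exercising the retransmit limit.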
            const int timesToFail = 2;
            var timesToFailLeft = 0;
            node1Client
                .Setup(c => c.PutObject(MockHelpers.ItIsPutObjectRequest(
                    Stubs.BucketName, "hello", Stubs.JobId, 0L)))
                .Callback(() =>
                {
                    if (timesToFailLeft < timesToFail)
                    {
                        timesToFailLeft++;
                        throw new IOException();
                    }
                });

            const int timesToFail2 = 3;
            var timesToFailLeft2 = 0;
            node2Client
                .Setup(c => c.PutObject(MockHelpers.ItIsPutObjectRequest(
                    Stubs.BucketName, "bar", Stubs.JobId, 0L)))
                .Callback(() =>
                {
                    if (timesToFailLeft2 < timesToFail2)
                    {
                        timesToFailLeft2++;
                        throw new IOException();
                    }
                });

            var job = new Ds3ClientHelpers(client.Object, objectTransferAttempts: 2).StartWriteJob(Stubs.BucketName, ds3Objects);

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<string>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            try
            {
                job.Transfer(key => new MockStream(streams[key]));

            }
            catch (AggregateException age)
            {
                var expectedMessage = string.Format(Resources.NoMoreRetransmitException, "2", "bar", "0");
                Assert.AreEqual(expectedMessage, age.InnerExceptions[0].Message);
            }

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEquivalent(new[] { 20L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames.Where(obj => !"bar".Equals(obj)), itemsCompleted);
        }
Code example #34
        static void Main(string[] args)
        {
            // Create the IDs3Client instance
            IDs3Client client = Ds3Builder.FromEnv().Build();

            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            string bucket = "bucket-name";
            string directory = "TestData";

            // Create a bucket if it does not already exist.
            helpers.EnsureBucketExists(bucket);

            // Generate the list of files to put for the job
            var objectList = FileHelpers.ListObjectsForDirectory(directory);

            // This is used later to create the stream for each object; it
            // can be replaced with custom code that creates the file stream.
            var streamBuilder = FileHelpers.BuildFilePutter(directory);

            // Create the bulk put job
            var putBulkResponse = client.PutBulkJobSpectraS3(
                new PutBulkJobSpectraS3Request(bucket, objectList));

            // Get the jobId from the response
            Guid jobId = putBulkResponse.ResponsePayload.JobId;

            // Create a set of chunk ids to know what has been processed and what has yet to be processed
            var chunkIds = from chunk in putBulkResponse.ResponsePayload.Objects
                           select chunk.ChunkId;
            var chunkSet = new HashSet<Guid>(chunkIds);

            // Continue processing until all the chunks have been sent.
            while (chunkSet.Count > 0)
            {
                // Get the set of chunks that are currently available for processing
                var chunkResponse = client.GetJobChunksReadyForClientProcessingSpectraS3(
                    new GetJobChunksReadyForClientProcessingSpectraS3Request(jobId)
                        .WithPreferredNumberOfChunks(10)); // This can be changed to any number
                                                           // but 10 is a good default and you
                                                           // are not guaranteed to get this many

                chunkResponse.Match((ts, response) =>
                {
                    // If this matcher is called, it means the chunks can be processed safely,
                    // without fear of the PutObject call failing due to cache-unavailable conditions.

                    // It is also safe to process all of the chunks in parallel, or to process
                    // each chunk sequentially while sending each object in parallel.

                    foreach (var chunk in response.Objects)
                    {
                        chunkSet.Remove(chunk.ChunkId);

                        // This next step can be done in parallel.
                        foreach (var obj in chunk.ObjectsList)
                        {
                            // Create the stream and seek to the correct position for that
                            // blob offset, and then wrap it in a PutObjectRequestStream to
                            // limit the amount of data transferred to the length of the
                            // blob being processed.
                            var stream = streamBuilder.Invoke(obj.Name);
                            stream.Seek(obj.Offset, System.IO.SeekOrigin.Begin);
                            var wrappedStream = new PutObjectRequestStream(stream, obj.Length);

                            // Put the blob
                            client.PutObject(
                                new PutObjectRequest(bucket, obj.Name, wrappedStream)
                                    .WithJob(jobId)
                                    .WithOffset(obj.Offset));
                        }
                    }
                },
                ts => {
                    // If this matcher is called, it means we need to wait before we can
                    // safely continue processing chunks.
                    Thread.Sleep(ts);
                });
            }
        }
Code example #35
        public static void Main()
        {
            // Configure and build the core client.
            var client = new Ds3Builder(
                ConfigurationManager.AppSettings["Ds3Endpoint"],
                new Credentials(
                    ConfigurationManager.AppSettings["Ds3AccessKey"],
                    ConfigurationManager.AppSettings["Ds3SecretKey"]
                )
            ).Build();

            // Set up the high-level abstractions.
            IDs3ClientHelpers helpers = new Ds3ClientHelpers(client);

            const string bucketName = "BulkPutWithStreamStrategy";
            const string directory = "TestData";

            // Creates a bucket if it does not already exist.
            helpers.EnsureBucketExists(bucketName);
            if (ClientSwitch.TraceVerbose) { Trace.WriteLine(string.Format("Bucket exists: {0}", bucketName)); }

            // Creates a bulk job with the server based on the files in a directory (recursively).
            var directoryObjects = FileHelpers.ListObjectsForDirectory(directory).ToList();
            var job = helpers.StartWriteJob(bucketName, directoryObjects, helperStrategy: new WriteStreamHelperStrategy());

            // Tracing example
            if (ClientSwitch.TraceInfo) { Trace.WriteLine(string.Format("StartWriteJob({0})", bucketName)); }
            if (ClientSwitch.TraceVerbose) { Trace.WriteLine(string.Format("Add files from: {0}", directory)); }

            // Keep the job id around. This is useful for job recovery in the case of a failure.
            Console.WriteLine("Job id {0} started.", job.JobId);

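            // Wrap each source file in a CryptoStream so an MD5 digest is computed
            // as the data is read during the transfer.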
            var cryptoStreams = new Dictionary<string, CryptoStream>();
            var md5s = new Dictionary<string, MD5>();

            directoryObjects.ForEach(obj =>
            {
                var md5 = MD5.Create();
                var fileStream = File.OpenRead(Path.Combine(directory, obj.Name));
                var md5Stream = new CryptoStream(fileStream, md5, CryptoStreamMode.Read);

                cryptoStreams.Add(obj.Name, md5Stream);
                md5s.Add(obj.Name, md5);
            });

            // Transfer all of the files.
            job.Transfer(fileName => cryptoStreams[fileName]);

            foreach (var stream in cryptoStreams.Select(pair => pair.Value).Where(stream => !stream.HasFlushedFinalBlock))
            {
                stream.FlushFinalBlock();
            }

            foreach (var md5 in md5s)
            {
                Console.WriteLine("Done transferring file {0} with MD5 value {1}", md5.Key, Convert.ToBase64String(md5.Value.Hash));
            }

            // Wait for user input.
            Console.WriteLine("Press enter to continue.");
            Console.ReadLine();
        }
Code example #36
        public void WriteTransferFailsUponTransferStrategyRecoverableException(Exception ex)
        {
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
            );

            var node1Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            MockHelpers.SetupPutObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock<IDs3Client>(MockBehavior.Strict);
            MockHelpers.SetupPutObject(node2Client, "bar", 0L, "0123456789abcde");
            MockHelpers.SetupPutObject(node2Client, "foo", 10L, "klmnopqrst");
            MockHelpers.SetupPutObject(node2Client, "foo", 0L, "abcdefghij");

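            // The "bar" blob at offset 15 is rigged to throw the injected exception,
            // so the write job never completes that object.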
            node2Client
                .Setup(c => c.PutObject(MockHelpers.ItIsPutObjectRequest(
                    Stubs.BucketName,
                    "bar",
                    Stubs.JobId,
                    15L)))
                .Throws(ex);

            var clientFactory = new Mock<IDs3ClientFactory>(MockBehavior.Strict);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
                .Returns(node1Client.Object);
            clientFactory
                .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
                .Returns(node2Client.Object);

            var streams = new Dictionary<string, string>
            {
                { "bar", "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ" },
                { "foo", "abcdefghijklmnopqrst" },
                { "hello", "ABCDefGHIJ" }
            };
            var ds3Objects = Stubs
                .ObjectNames
                .Select(name => new Ds3Object(name, streams[name].Length));

            var client = new Mock<IDs3Client>(MockBehavior.Strict);
            client
                .Setup(c => c.BuildFactory(Stubs.Nodes))
                .Returns(clientFactory.Object);
            client
                .Setup(c => c.PutBulkJobSpectraS3(MockHelpers.ItIsBulkPutRequest(Stubs.BucketName, ds3Objects, null)))
                .Returns(new PutBulkJobSpectraS3Response(initialJobResponse));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId1)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk1(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId2)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk2(Stubs.NodeId2, false, false)));
            client
                .Setup(c => c.AllocateJobChunkSpectraS3(MockHelpers.ItIsAllocateRequest(Stubs.ChunkId3)))
                .Returns(AllocateJobChunkSpectraS3Response.Success(Stubs.Chunk3(Stubs.NodeId1, false, false)));

            var job = new Ds3ClientHelpers(client.Object).StartWriteJob(Stubs.BucketName, ds3Objects);

            var dataTransfers = new ConcurrentQueue<long>();
            var itemsCompleted = new ConcurrentQueue<string>();
            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted += itemsCompleted.Enqueue;

            try
            {
                job.Transfer(key => new MockStream(streams[key]));
                Assert.Fail("Should have thrown an exception.");
            }
            catch (AggregateException e)
            {
                Assert.IsInstanceOf<Ds3NoMoreRetransmitException>(e.InnerException);
            }

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEquivalent(new[] { 15L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames.Where(obj => !obj.Equals("bar")), itemsCompleted);
        }
Code example #37
        public void TestWithRetransmitFailingPutBlobsWithNonSeekableStream()
        {
            const string bucketName = "TestWithRetransmitFailingPutBlobsWithNonSeekableStream";
            try
            {
                var helpers = new Ds3ClientHelpers(this._client, objectTransferAttempts: 4);
                helpers.EnsureBucketExists(bucketName);

                const string content = "hi im content";
                var contentBytes = System.Text.Encoding.UTF8.GetBytes(content);

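                // A non-seekable stream cannot be rewound for a retransmit, so the transfer
                // is expected to fail with Ds3NotSupportedStream.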
                var stream = new NonSeekableStream(new MemoryStream(contentBytes));

                var objects = new List<Ds3Object>
                {
                    new Ds3Object("obj1", contentBytes.Length)
                };

                var job = helpers.StartWriteJob(bucketName, objects);

                try
                {
                    job.Transfer(s => stream);
                    Assert.Fail();
                }
                catch (AggregateException age)
                {
                    Assert.AreEqual(typeof(Ds3NotSupportedStream), age.InnerExceptions[0].GetType());
                }
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }
Code example #38
        public void BasicReadTransfer()
        {
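            // Happy-path read: every chunk is reported as available on the first poll,
            // so each blob is simply streamed from its assigned node.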
            var initialJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(null, false, false),
                Stubs.Chunk2(null, false, false),
                Stubs.Chunk3(null, false, false)
                );
            var availableJobResponse = Stubs.BuildJobResponse(
                Stubs.Chunk1(Stubs.NodeId2, true, true),
                Stubs.Chunk2(Stubs.NodeId2, true, true),
                Stubs.Chunk3(Stubs.NodeId1, true, true)
                );

            var node1Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupGetObject(node1Client, "hello", 0L, "ABCDefGHIJ");
            SetupGetObject(node1Client, "bar", 35L, "zABCDEFGHIJ");

            var node2Client = new Mock <IDs3Client>(MockBehavior.Strict);

            SetupGetObject(node2Client, "bar", 0L, "0123456789abcde");
            SetupGetObject(node2Client, "foo", 10L, "klmnopqrst");
            SetupGetObject(node2Client, "foo", 0L, "abcdefghij");
            SetupGetObject(node2Client, "bar", 15L, "fghijklmnopqrstuvwxy");

            var clientFactory = new Mock <IDs3ClientFactory>(MockBehavior.Strict);

            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId1))
            .Returns(node1Client.Object);
            clientFactory
            .Setup(cf => cf.GetClientForNodeId(Stubs.NodeId2))
            .Returns(node2Client.Object);

            var client = new Mock <IDs3Client>(MockBehavior.Strict);

            client
            .Setup(c => c.BuildFactory(Stubs.Nodes))
            .Returns(clientFactory.Object);
            client
            .Setup(c => c.BulkGet(ItIsBulkGetRequest(
                                      Stubs.BucketName,
                                      ChunkOrdering.None,
                                      Stubs.ObjectNames,
                                      Enumerable.Empty <Ds3PartialObject>()
                                      )))
            .Returns(initialJobResponse);
            client
            .Setup(c => c.GetAvailableJobChunks(ItIsGetAvailableJobChunksRequest(Stubs.JobId)))
            .Returns(GetAvailableJobChunksResponse.Success(TimeSpan.FromMinutes(1), availableJobResponse));

            var job = new Ds3ClientHelpers(client.Object).StartReadJob(
                Stubs.BucketName,
                Stubs.ObjectNames.Select(name => new Ds3Object(name, null))
                );

            var dataTransfers  = new ConcurrentQueue <long>();
            var itemsCompleted = new ConcurrentQueue <string>();

            job.DataTransferred += dataTransfers.Enqueue;
            job.ItemCompleted   += itemsCompleted.Enqueue;

            var streams = new ConcurrentDictionary <string, MockStream>();

            job.Transfer(key => streams.GetOrAdd(key, k => new MockStream()));

            node1Client.VerifyAll();
            node2Client.VerifyAll();
            clientFactory.VerifyAll();
            client.VerifyAll();

            CollectionAssert.AreEqual(
                new[]
            {
                new { Key = "bar", Value = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJ" },
                new { Key = "foo", Value = "abcdefghijklmnopqrst" },
                new { Key = "hello", Value = "ABCDefGHIJ" },
            },
                from item in streams
                orderby item.Key
                select new { item.Key, Value = _encoding.GetString(item.Value.Result) }
                );
            CollectionAssert.AreEquivalent(new[] { 15L, 20L, 11L, 10L, 10L, 10L }, dataTransfers);
            CollectionAssert.AreEquivalent(Stubs.ObjectNames, itemsCompleted);
        }