        [Test]
        public void TestChecksumStreamingWithMultiStreams()
        {
            const string bucketName = "TestChecksumStreamingWithMultiStreams";

            try
            {
                // Creates a bucket if it does not already exist.
                _helpers.EnsureBucketExists(bucketName);

                /* Use a 1GB file with a 100MB blob size so each stream fits in a single chunk of blobs (ten full 100MB blobs plus a 24MB remainder). */
                const long streamLength = 1L * 1024L * 1024L * 1024L;
                const long blobSize = 100L * 1024L * 1024L;

                var directoryObjects = new List<Ds3Object>
                {
                    new Ds3Object("bigFile1", streamLength),
                    new Ds3Object("bigFile2", streamLength),
                    new Ds3Object("bigFile3", streamLength)
                };

                // Test the PUT
                var putJob = _helpers.StartWriteJob(bucketName, directoryObjects, new Ds3WriteJobOptions { MaxUploadSize = blobSize }, new WriteStreamHelperStrategy());

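                // One source stream and one MD5 accumulator per object, keyed by object name.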
                var putCryptoStreams = new Dictionary<string, CryptoStream>();
                var putMd5s = new Dictionary<string, MD5>();

                directoryObjects.ForEach(obj =>
                {
                    var md5 = MD5.Create();
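                    // ChecksumStream is assumed to generate streamLength bytes of
                    // deterministic in-memory test data, so no real files are needed.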
                    var fileStream = new ChecksumStream(streamLength, this._copyBufferSize.Value);
                    var md5Stream = new CryptoStream(fileStream, md5, CryptoStreamMode.Read);

                    putCryptoStreams.Add(obj.Name, md5Stream);
                    putMd5s.Add(obj.Name, md5);
                });

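                // Run the PUT; the callback hands each object its own hashing stream by name.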
                putJob.Transfer(fileName => putCryptoStreams[fileName]);

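                // MD5.Hash is only valid once the final block has been transformed,
                // so flush any streams the transfer left unflushed.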
                foreach (var stream in putCryptoStreams.Values.Where(s => !s.HasFlushedFinalBlock))
                {
                    stream.FlushFinalBlock();
                }

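                // All three objects streamed identical data, so every digest must match the known value.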
                foreach (var md5 in putMd5s.Values)
                {
                    Assert.AreEqual("Rt83cCvGZHQGu3eRIdfJIQ==", Convert.ToBase64String(md5.Hash));
                }

                // Test the GET
                var getJob = _helpers.StartReadAllJob(bucketName, helperStrategy: new ReadStreamHelperStrategy());

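                // For the GET, hashing happens on the write path: downloaded bytes are
                // written through a write-mode CryptoStream into the ChecksumStream sink.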
                var getCryptoStreams = new Dictionary<string, CryptoStream>();
                var getMd5s = new Dictionary<string, MD5>();

                directoryObjects.ForEach(obj =>
                {
                    var md5 = MD5.Create();
                    var fileStream = new ChecksumStream(streamLength, this._copyBufferSize.Value);
                    var md5Stream = new CryptoStream(fileStream, md5, CryptoStreamMode.Write);

                    getCryptoStreams.Add(obj.Name, md5Stream);
                    getMd5s.Add(obj.Name, md5);
                });

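                // Run the GET; each object's data is hashed as it is written back out.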
                getJob.Transfer(fileName => getCryptoStreams[fileName]);

                foreach (var stream in getCryptoStreams.Values.Where(s => !s.HasFlushedFinalBlock))
                {
                    stream.FlushFinalBlock();
                }

                foreach (var md5 in getMd5s.Values)
                {
                    Assert.AreEqual("Rt83cCvGZHQGu3eRIdfJIQ==", Convert.ToBase64String(md5.Hash));
                }
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }

        [Test]
        public void TestChecksumStreamingWithMultiChunks()
        {
            const string bucketName = "TestChecksumStreamingWithMultiChunks";

            try
            {
                // Creates a bucket if it does not already exist.
                _helpers.EnsureBucketExists(bucketName);

                const long streamLength = 150L * 1024L * 1024L * 1024L; // 150GB, large enough to force the job into 2 chunks
                var directoryObjects = new List<Ds3Object> { new Ds3Object("bigFile", streamLength) };

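                // MaxUploadSize caps each blob at 1GB, so the 150GB file becomes 150 blobs.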
                const long blobSize = 1L * 1024L * 1024L * 1024L;

                // Test the PUT
                var putJob = _helpers.StartWriteJob(bucketName, directoryObjects, new Ds3WriteJobOptions { MaxUploadSize = blobSize }, new WriteStreamHelperStrategy());
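                // A single read-mode CryptoStream feeds the whole job, hashing data as it is uploaded.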
                using (var fileStream = new ChecksumStream(streamLength, this._copyBufferSize.Value))
                {
                    var md5 = MD5.Create();
                    using (var md5Stream = new CryptoStream(fileStream, md5, CryptoStreamMode.Read))
                    {
                        putJob.Transfer(fileName => md5Stream);

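                        // Finalize the transform so md5.Hash is populated.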
                        if (!md5Stream.HasFlushedFinalBlock)
                        {
                            md5Stream.FlushFinalBlock();
                        }

                        Assert.AreEqual("6pqugiiIUgxPkHfKKgq52A==", Convert.ToBase64String(md5.Hash));
                    }
                }

                // Test the GET
                var getJob = _helpers.StartReadAllJob(bucketName, helperStrategy: new ReadStreamHelperStrategy());
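                // Mirror the PUT verification: hash the downloaded data on the write path.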
                using (Stream fileStream = new ChecksumStream(streamLength, this._copyBufferSize.Value))
                {
                    var md5 = MD5.Create();
                    using (var md5Stream = new CryptoStream(fileStream, md5, CryptoStreamMode.Write))
                    {
                        getJob.Transfer(fileName => md5Stream);

                        if (!md5Stream.HasFlushedFinalBlock)
                        {
                            md5Stream.FlushFinalBlock();
                        }

                        Assert.AreEqual("6pqugiiIUgxPkHfKKgq52A==", Convert.ToBase64String(md5.Hash));
                    }
                }
            }
            finally
            {
                Ds3TestUtils.DeleteBucket(_client, bucketName);
            }
        }