/// <summary>
/// This will get the object and return the name of the temporary file it was written to.
/// It is up to the caller to delete the temporary file.
/// </summary>
public static string GetSingleObject(IDs3Client client, string bucketName, string objectName, int retries = 5)
{
    var tempFilename = Path.GetTempFileName();
    using (Stream fileStream = new FileStream(tempFilename, FileMode.Truncate, FileAccess.Write))
    {
        IDs3ClientHelpers helper = new Ds3ClientHelpers(client, getObjectRetries: retries);

        var job = helper.StartReadJob(bucketName, new List<Ds3Object> { new Ds3Object(objectName, null) });
        job.Transfer(key => fileStream);

        return tempFilename;
    }
}
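// Usage sketch (illustrative, not part of the SDK): shows how a caller might use
// GetSingleObject and then clean up the temporary file it returns, as the summary
// above requires. Assumes an existing IDs3Client instance; the bucket and object
// names below are hypothetical.
public static void GetSingleObjectUsageExample(IDs3Client client)
{
    var tempFile = GetSingleObject(client, "books", "beowulf.txt");
    try
    {
        // Consume the downloaded data, e.g. by reading the file back from disk.
        var contents = File.ReadAllText(tempFile);
        Console.WriteLine(contents.Length);
    }
    finally
    {
        // The caller owns the temporary file and is responsible for deleting it.
        File.Delete(tempFile);
    }
}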
public void TestEventEmiter()
{
    const string bucketName = "eventEmitter";
    try
    {
        var counter = 0;
        Ds3TestUtils.LoadTestData(_client, bucketName);

        var ds3ObjList = new List<Ds3Object> { new Ds3Object("beowulf.txt", null) };
        var helpers = new Ds3ClientHelpers(_client);
        var job = helpers.StartReadJob(bucketName, ds3ObjList);

        job.ItemCompleted += item =>
        {
            Console.WriteLine(@"Got completed event for " + item);
            counter++;
        };

        job.Transfer(name => Stream.Null);

        Assert.AreEqual(1, counter);
    }
    finally
    {
        Ds3TestUtils.DeleteBucket(_client, bucketName);
    }
}
public void TestGetBestEffort()
{
    const string bucketName = "TestGetBestEffort";
    try
    {
        var helpers = new Ds3ClientHelpers(this._client);
        helpers.EnsureBucketExists(bucketName);

        // Upload data for the test:
        // 3 files: 1 with 3 blobs, 1 with 2 blobs and 1 with 1 blob
        const int blobSize = 10485760; // 10MB blob size
        var putJob = helpers.StartWriteJob(bucketName, Utils.Objects,
            ds3WriteJobOptions: new Ds3WriteJobOptions { MaxUploadSize = blobSize });
        putJob.Transfer(Utils.ReadResource);

        // Getting the data back:
        // 1 blob will be missing from the 3-blob object
        var getJob = helpers.StartReadJob(bucketName, Utils.Objects);

        var dataTransfers = new ConcurrentQueue<long>();
        var itemsCompleted = new ConcurrentQueue<string>();
        getJob.DataTransferred += dataTransfers.Enqueue;
        getJob.ItemCompleted += itemsCompleted.Enqueue;

        Assert.Throws<AggregateException>(() => getJob.Transfer(s => new MemoryStream()));

        // 7229224 will have an exception
        CollectionAssert.AreEquivalent(new[] { 10485760, 10485760, 10485760, 8027314, 8160373 }, dataTransfers);
        CollectionAssert.AreEquivalent(
            Utils.Objects.Select(obj => obj.Name).Where(obj => !obj.Equals("3_blobs.txt")),
            itemsCompleted);
    }
    finally
    {
        Ds3TestUtils.DeleteBucket(_client, bucketName);
    }
}
public void TestJobEvents()
{
    const string bucketName = "TestJobEvents";
    try
    {
        Ds3TestUtils.LoadTestData(_client, bucketName);

        var ds3ObjList = new List<Ds3Object> { new Ds3Object("beowulf.txt", null) };
        var helpers = new Ds3ClientHelpers(_client);

        Ds3TestUtils.UsingAllStringReadStrategies(strategy =>
        {
            var counter = 0;
            var dataTransferred = 0L;

            var job = helpers.StartReadJob(bucketName, ds3ObjList, strategy);
            job.ItemCompleted += item => { counter++; };
            job.DataTransferred += item => { dataTransferred += item; };

            job.Transfer(name => Stream.Null);

            Assert.AreEqual(1, counter);
            Assert.AreEqual(294059, dataTransferred);
        });
    }
    finally
    {
        Ds3TestUtils.DeleteBucket(_client, bucketName);
    }
}
/// <summary>
/// This will get the object and return the name of the temporary file it was written to.
/// It is up to the caller to delete the temporary file.
/// </summary>
public static string GetSingleObject(IDs3Client client, string bucketName, string objectName, int retries = 5,
    IHelperStrategy<string> helperStrategy = null)
{
    var tempFilename = Path.GetTempFileName();
    using (Stream fileStream = new FileStream(tempFilename, FileMode.Truncate, FileAccess.Write))
    {
        IDs3ClientHelpers helper = new Ds3ClientHelpers(client, objectTransferAttempts: retries);

        if (helperStrategy == null)
        {
            helperStrategy = new ReadRandomAccessHelperStrategy<string>();
        }

        var job = helper.StartReadJob(bucketName, new List<Ds3Object> { new Ds3Object(objectName, null) },
            helperStrategy: helperStrategy);
        job.Transfer(key => fileStream);

        return tempFilename;
    }
}
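// Usage sketch (illustrative, not part of the SDK): the strategy-aware overload above
// accepts any IHelperStrategy&lt;string&gt;. Here the default random-access strategy is
// passed explicitly just to show the parameter; the bucket and object names are
// hypothetical, and the caller must still delete the returned temporary file.
public static void GetSingleObjectWithStrategyExample(IDs3Client client)
{
    var tempFile = GetSingleObject(
        client,
        "books",
        "beowulf.txt",
        retries: 3,
        helperStrategy: new ReadRandomAccessHelperStrategy<string>());
    try
    {
        // Verify the download landed on disk before handing the file to the caller's logic.
        Console.WriteLine(new FileInfo(tempFile).Length);
    }
    finally
    {
        File.Delete(tempFile);
    }
}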