public async Task put_object()
{
    var settings = GetS3Settings();
    using (var client = new RavenAwsS3Client(settings, DefaultConfiguration))
    {
        var blobs = GenerateBlobNames(settings, 1, out _);
        Assert.Equal(1, blobs.Count);
        var key = blobs[0];

        var value1 = Guid.NewGuid().ToString();
        var value2 = Guid.NewGuid().ToString();

        // Upload a small payload with two custom metadata properties.
        // Dispose the source stream once the (synchronous) upload completes.
        using (var payload = new MemoryStream(Encoding.UTF8.GetBytes("231")))
        {
            client.PutObject(key, payload, new Dictionary<string, string>
            {
                { "property1", value1 },
                { "property2", value2 }
            });
        }

        var @object = await client.GetObjectAsync(key);
        Assert.NotNull(@object);

        // Async read instead of blocking ReadToEnd() inside an async test.
        using (var reader = new StreamReader(@object.Data))
            Assert.Equal("231", await reader.ReadToEndAsync());

        // Metadata key names come back transformed by S3, so match by substring
        // rather than by exact key.
        var property1 = @object.Metadata.Keys.Single(x => x.Contains("property1"));
        var property2 = @object.Metadata.Keys.Single(x => x.Contains("property2"));

        Assert.Equal(value1, @object.Metadata[property1]);
        Assert.Equal(value2, @object.Metadata[property2]);
    }
}
protected override async Task<ZipArchive> GetZipArchive(string filePath)
{
    // Download the blob and stage it in a local temp file; the archive deletes
    // that file when it is closed.
    var remoteBlob = await _client.GetObjectAsync(filePath);
    var localFile = await RestoreBackupTaskBase.CopyRemoteStreamLocally(remoteBlob.Data, _configuration.TempPath);
    return new DeleteOnCloseZipArchive(localFile, ZipArchiveMode.Read);
}
public async Task put_object(string region)
{
    var bucketName = $"testing-{Guid.NewGuid()}";
    var key = $"test-key-{Guid.NewGuid()}";

    using (var client = new RavenAwsS3Client(GetS3Settings(region, bucketName)))
    {
        // make sure that the bucket doesn't exist
        client.DeleteBucket();

        try
        {
            client.PutBucket();

            var value1 = Guid.NewGuid().ToString();
            var value2 = Guid.NewGuid().ToString();

            // Dispose the source stream once the (synchronous) upload completes.
            using (var payload = new MemoryStream(Encoding.UTF8.GetBytes("231")))
            {
                client.PutObject(key, payload, new Dictionary<string, string>
                {
                    { "property1", value1 },
                    { "property2", value2 }
                });
            }

            // can't delete a bucket with existing objects
            var e = Assert.Throws<StorageException>(() => client.DeleteBucket());
            // Assert.Contains reports the actual message on failure,
            // unlike Assert.True(Message.Contains(...)).
            Assert.Contains("The bucket you tried to delete is not empty", e.Message);

            var @object = await client.GetObjectAsync(key);
            Assert.NotNull(@object);

            // Async read instead of blocking ReadToEnd() inside an async test.
            using (var reader = new StreamReader(@object.Data))
                Assert.Equal("231", await reader.ReadToEndAsync());

            // Metadata key names come back transformed by S3, so match by substring.
            var property1 = @object.Metadata.Keys.Single(x => x.Contains("property1"));
            var property2 = @object.Metadata.Keys.Single(x => x.Contains("property2"));

            Assert.Equal(value1, @object.Metadata[property1]);
            Assert.Equal(value2, @object.Metadata[property2]);
        }
        finally
        {
            client.DeleteObject(key);
            client.DeleteBucket();
        }
    }
}
protected override async Task<Stream> GetStream(string path)
{
    // Hand the remote blob's stream straight to the caller — no local buffering.
    var remoteBlob = await _client.GetObjectAsync(path);
    return remoteBlob.Data;
}
// ReSharper disable once InconsistentNaming
private static async Task PutObject(string region, int sizeInMB, bool testBlobKeyAsFolder, UploadType uploadType)
{
    var bucketName = $"testing-{Guid.NewGuid()}";
    var key = testBlobKeyAsFolder == false
        ? Guid.NewGuid().ToString()
        : $"{Guid.NewGuid()}/folder/testKey";

    var progress = new Progress();
    // The size limits are private fields on the client, so set them via reflection-based setters.
    var maxUploadPutObjectInBytesSetter = ExpressionHelper.CreateFieldSetter<RavenAwsS3Client, int>("MaxUploadPutObjectSizeInBytes");
    var minOnePartUploadSizeLimitInBytesSetter = ExpressionHelper.CreateFieldSetter<RavenAwsS3Client, int>("MinOnePartUploadSizeLimitInBytes");

    using (var client = new RavenAwsS3Client(GetS3Settings(region, bucketName), progress))
    {
        maxUploadPutObjectInBytesSetter(client, 10 * 1024 * 1024); // 10MB
        minOnePartUploadSizeLimitInBytesSetter(client, 7 * 1024 * 1024); // 7MB

        // make sure that the bucket doesn't exist
        client.DeleteBucket();

        try
        {
            client.PutBucket();

            var value1 = Guid.NewGuid().ToString();
            var value2 = Guid.NewGuid().ToString();

            // Build the constant-fill payload in a single allocation instead of
            // appending one char at a time to a StringBuilder.
            var content = new string('a', sizeInMB * 1024 * 1024);

            long streamLength;
            using (var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(content)))
            {
                streamLength = memoryStream.Length;
                client.PutObject(key, memoryStream, new Dictionary<string, string>
                {
                    { "property1", value1 },
                    { "property2", value2 }
                });
            }

            var @object = await client.GetObjectAsync(key);
            Assert.NotNull(@object);

            // Async read instead of blocking ReadToEnd() inside an async method.
            using (var reader = new StreamReader(@object.Data))
                Assert.Equal(content, await reader.ReadToEndAsync());

            // Metadata key names come back transformed by S3, so match by substring.
            var property1 = @object.Metadata.Keys.Single(x => x.Contains("property1"));
            var property2 = @object.Metadata.Keys.Single(x => x.Contains("property2"));

            Assert.Equal(value1, @object.Metadata[property1]);
            Assert.Equal(value2, @object.Metadata[property2]);

            Assert.Equal(UploadState.Done, progress.UploadProgress.UploadState);
            Assert.Equal(uploadType, progress.UploadProgress.UploadType);
            Assert.Equal(streamLength, progress.UploadProgress.TotalInBytes);
            Assert.Equal(streamLength, progress.UploadProgress.UploadedInBytes);
        }
        finally
        {
            client.DeleteObject(key);
            client.DeleteBucket();
        }
    }
}
protected override async Task<ZipArchive> GetZipArchive(string filePath)
{
    // Open the archive directly over the remote blob's stream (no local copy).
    var remoteBlob = await _client.GetObjectAsync(filePath);
    return new ZipArchive(remoteBlob.Data, ZipArchiveMode.Read);
}
// ReSharper disable once InconsistentNaming
private async Task PutObject(int sizeInMB, bool testBlobKeyAsFolder, UploadType uploadType)
{
    var settings = GetS3Settings();
    var blobs = GenerateBlobNames(settings, 1, out _);
    Assert.Equal(1, blobs.Count);
    var key = $"{blobs[0]}";

    if (testBlobKeyAsFolder)
    {
        key += "/";
    }

    var progress = new Progress();
    using (var client = new RavenAwsS3Client(settings, DefaultConfiguration, progress))
    {
        // Shrink the thresholds so the test exercises the multipart path
        // without needing a huge payload.
        client.MaxUploadPutObject = new Sparrow.Size(10, SizeUnit.Megabytes);
        client.MinOnePartUploadSizeLimit = new Sparrow.Size(7, SizeUnit.Megabytes);

        var value1 = Guid.NewGuid().ToString();
        var value2 = Guid.NewGuid().ToString();

        // Build the constant-fill payload in a single allocation instead of
        // appending one char at a time to a StringBuilder.
        var content = new string('a', sizeInMB * 1024 * 1024);

        long streamLength;
        using (var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(content)))
        {
            streamLength = memoryStream.Length;
            client.PutObject(key, memoryStream, new Dictionary<string, string>
            {
                { "property1", value1 },
                { "property2", value2 }
            });
        }

        var @object = await client.GetObjectAsync(key);
        Assert.NotNull(@object);

        // Async read instead of blocking ReadToEnd() inside an async method.
        using (var reader = new StreamReader(@object.Data))
            Assert.Equal(content, await reader.ReadToEndAsync());

        // Metadata key names come back transformed by S3, so match by substring.
        var property1 = @object.Metadata.Keys.Single(x => x.Contains("property1"));
        var property2 = @object.Metadata.Keys.Single(x => x.Contains("property2"));

        Assert.Equal(value1, @object.Metadata[property1]);
        Assert.Equal(value2, @object.Metadata[property2]);

        Assert.Equal(UploadState.Done, progress.UploadProgress.UploadState);
        Assert.Equal(uploadType, progress.UploadProgress.UploadType);
        Assert.Equal(streamLength, progress.UploadProgress.TotalInBytes);
        Assert.Equal(streamLength, progress.UploadProgress.UploadedInBytes);
    }
}
// ReSharper disable once InconsistentNaming
private async Task PutObject(int sizeInMB, bool testBlobKeyAsFolder, UploadType uploadType, bool noAsciiDbName)
{
    var settings = GetS3Settings();
    var blobs = GenerateBlobNames(settings, 1, out _);
    Assert.Equal(1, blobs.Count);

    var progress = new Progress();
    using (var client = new RavenAwsS3Client(settings, DefaultConfiguration, progress))
    {
        // Shrink the thresholds so the test exercises the multipart path
        // without needing a huge payload.
        client.MaxUploadPutObject = new Sparrow.Size(10, SizeUnit.Megabytes);
        client.MinOnePartUploadSizeLimit = new Sparrow.Size(7, SizeUnit.Megabytes);

        var property1 = "property1";
        var property2 = "property2";
        var value1 = Guid.NewGuid().ToString();
        var value2 = Guid.NewGuid().ToString();

        string key;
        if (noAsciiDbName)
        {
            // Exercise non-ASCII (Polish/Hebrew) characters in both the blob key
            // and a metadata key/value pair.
            string dateStr = DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss");
            key = $"{dateStr}.ravendb-żżżרייבן-A-backup/{dateStr}.ravendb-full-backup";
            property1 = "Description-żżרייבן";
            value1 = "ravendb-żżżרייבן-A-backup";
        }
        else
        {
            key = $"{blobs[0]}";
        }

        if (testBlobKeyAsFolder)
        {
            key += "/";
        }

        // Build the constant-fill payload in a single allocation instead of
        // appending one char at a time to a StringBuilder.
        var content = new string('a', sizeInMB * 1024 * 1024);

        long streamLength;
        using (var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(content)))
        {
            streamLength = memoryStream.Length;
            client.PutObject(key, memoryStream, new Dictionary<string, string>
            {
                { property1, value1 },
                { property2, value2 }
            });
        }

        var @object = await client.GetObjectAsync(key);
        Assert.NotNull(@object);

        // Async read instead of blocking ReadToEnd() inside an async method.
        using (var reader = new StreamReader(@object.Data))
            Assert.Equal(content, await reader.ReadToEndAsync());

        // Non-ASCII metadata keys come back percent-encoded and lower-cased;
        // ToLowerInvariant avoids culture-dependent casing (e.g. Turkish 'I').
        var property1check = @object.Metadata.Keys.Single(x => x.Contains(Uri.EscapeDataString(property1).ToLowerInvariant()));
        var property2check = @object.Metadata.Keys.Single(x => x.Contains(property2));

        Assert.Equal(Uri.EscapeDataString(value1), @object.Metadata[property1check]);
        Assert.Equal(value2, @object.Metadata[property2check]);

        Assert.Equal(UploadState.Done, progress.UploadProgress.UploadState);
        Assert.Equal(uploadType, progress.UploadProgress.UploadType);
        Assert.Equal(streamLength, progress.UploadProgress.TotalInBytes);
        Assert.Equal(streamLength, progress.UploadProgress.UploadedInBytes);
    }
}