/// <summary>
/// Checks whether the S3 object backing this file exists, also reporting whether its bucket exists.
/// </summary>
/// <param name="bucketExists">Set to false only when S3 reports "NoSuchBucket"; otherwise true.</param>
/// <returns>True if the object exists; false when the object or its bucket is missing.</returns>
internal bool ExistsWithBucketCheck(out bool bucketExists)
{
    bucketExists = true;
    try
    {
        var request = new GetObjectMetadataRequest
        {
            BucketName = bucket,
            Key = S3Helper.EncodeKey(key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);

        // If the object doesn't exist then a "NotFound" will be thrown.
        // Use GetAwaiter().GetResult() rather than Wait(): Wait() wraps failures in an
        // AggregateException, which would bypass the AmazonS3Exception catch below and
        // make the NotFound/NoSuchBucket handling dead code.
        s3Client.GetObjectMetadataAsync(request).GetAwaiter().GetResult();
        return true;
    }
    catch (AmazonS3Exception e)
    {
        if (string.Equals(e.ErrorCode, "NoSuchBucket"))
        {
            bucketExists = false;
            return false;
        }
        else if (string.Equals(e.ErrorCode, "NotFound"))
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Determines whether this file's S3 object exists and whether its bucket exists.
/// </summary>
/// <param name="bucketExists">False only when S3 reports "NoSuchBucket"; otherwise true.</param>
/// <returns>True when the object exists; false when it, or its bucket, is missing.</returns>
internal bool ExistsWithBucketCheck(out bool bucketExists)
{
    bucketExists = true;

    var metadataRequest = new GetObjectMetadataRequest
    {
        BucketName = bucket,
        Key = S3Helper.EncodeKey(key)
    };
    metadataRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;

    try
    {
        // A missing object surfaces as an AmazonS3Exception with ErrorCode "NotFound".
        s3Client.GetObjectMetadata(metadataRequest);
    }
    catch (AmazonS3Exception e)
    {
        if (string.Equals(e.ErrorCode, "NoSuchBucket"))
        {
            bucketExists = false;
            return false;
        }
        if (string.Equals(e.ErrorCode, "NotFound"))
        {
            return false;
        }
        throw;
    }

    return true;
}
/// <summary>
/// Flushes the in-memory buffer to S3 as a single PutObject call, creating the bucket
/// first if this stream has not yet observed it to exist.
/// </summary>
/// <param name="flushToS3">When true (and the stream is writable and dirty) the buffer is uploaded.</param>
public void Flush(bool flushToS3)
{
    // Only upload when writable, asked to flush, and there are unflushed writes.
    if (this.canWrite && flushToS3 && this.lastWriteCounter != this.lastFlushMarker)
    {
        long pos = Position;
        try
        {
            if (!this.bucketExist)
            {
                file.S3Client.PutBucket(new PutBucketRequest().WithBucketName(file.BucketName).WithUseClientRegion(true));
                this.bucketExist = true;
            }
            // Rewind so the entire buffer content is uploaded from the start.
            buffer.Seek(0, SeekOrigin.Begin);
            file.S3Client.PutObject((PutObjectRequest) new PutObjectRequest()
                .WithBucketName(file.BucketName)
                .WithKey(S3Helper.EncodeKey(file.ObjectKey))
                // Keep the buffer open: this stream continues to use it after the flush.
                .WithAutoCloseStream(false)
                .WithInputStream(buffer)
                .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler));
            // Mark the stream clean so repeated flushes don't re-upload unchanged data.
            this.lastFlushMarker = this.lastWriteCounter;
        }
        finally
        {
            // Restore the caller-visible stream position even if the upload failed.
            buffer.Seek(pos, SeekOrigin.Begin);
        }
    }
}
/// <summary>
/// Flushes the in-memory buffer to S3 as a single PutObject call, creating the bucket
/// first if this stream has not yet observed it to exist. If the bucket was deleted
/// between checks, it is recreated and the upload retried once.
/// </summary>
/// <param name="flushToS3">When true (and the stream is writable and dirty) the buffer is uploaded.</param>
public void Flush(bool flushToS3)
{
    // Only upload when writable, asked to flush, and there are unflushed writes.
    if (this.canWrite && flushToS3 && this.lastWriteCounter != this.lastFlushMarker)
    {
        long pos = Position;
        try
        {
            if (!this.bucketExist)
            {
                file.S3Client.PutBucket(new PutBucketRequest { BucketName = file.BucketName });
                this.bucketExist = true;
            }
            var request = new PutObjectRequest
            {
                BucketName = file.BucketName,
                Key = S3Helper.EncodeKey(file.ObjectKey),
                InputStream = buffer,
                // Keep the buffer open: this stream continues to use it after the flush.
                AutoCloseStream = false
            };
            request.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;
            try
            {
                // Rewind so the entire buffer content is uploaded from the start.
                buffer.Seek(0, SeekOrigin.Begin);
                file.S3Client.PutObject(request);
            }
            catch (AmazonS3Exception e)
            {
                if (!string.Equals(e.ErrorCode, "NoSuchBucket"))
                {
                    throw;
                }
                // Bucket no longer exists so create and retry put
                file.Directory.Create();
                buffer.Seek(0, SeekOrigin.Begin);
                file.S3Client.PutObject(request);
            }
            // Mark the stream clean so repeated flushes don't re-upload unchanged data.
            this.lastFlushMarker = this.lastWriteCounter;
        }
        finally
        {
            // Restore the caller-visible stream position even if the upload failed.
            buffer.Seek(pos, SeekOrigin.Begin);
        }
    }
}
/// <summary>
/// Deletes the file from S3.
/// </summary>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
public void Delete()
{
    if (Exists)
    {
        s3Client.DeleteObject(new DeleteObjectRequest()
            .WithBucketName(bucket)
            .WithKey(S3Helper.EncodeKey(key))
            .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as DeleteObjectRequest);
        // Recreate the containing directory so it remains present after its
        // object is removed — presumably a directory marker; confirm against
        // S3DirectoryInfo.Create.
        Directory.Create();
    }
}
/// <summary>
/// Enumerate the sub directories of this directory.
/// </summary>
/// <param name="searchPattern">The search string. The default pattern is "*", which returns all directories.</param>
/// <param name="searchOption">One of the enumeration values that specifies whether the search operation should include only the current directory or all subdirectories. The default value is TopDirectoryOnly.</param>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>An enumerable collection of directories that matches searchPattern and searchOption.</returns>
public IEnumerable<S3DirectoryInfo> EnumerateDirectories(string searchPattern, SearchOption searchOption)
{
    IEnumerable<S3DirectoryInfo> folders = null;
    if (String.IsNullOrEmpty(bucket))
    {
        // No bucket means this instance represents the account root: the
        // "directories" are the account's buckets themselves.
        var request = new ListBucketsRequest();
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        // GetAwaiter().GetResult() (rather than Wait()/Result) propagates the original
        // AmazonS3Exception instead of an AggregateException, matching the documented contract.
        var response = s3Client.ListBucketsAsync(request).GetAwaiter().GetResult();
        folders = response.Buckets.ConvertAll(s3Bucket => new S3DirectoryInfo(s3Client, s3Bucket.BucketName, ""));
    }
    else
    {
        var request = new ListObjectsRequest
        {
            BucketName = bucket,
            Delimiter = "/",
            Prefix = S3Helper.EncodeKey(key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        // Lazily page through ListObjects; each CommonPrefix under the delimiter is a
        // child "directory".
        folders = new EnumerableConverter<string, S3DirectoryInfo>(
            (IEnumerable<string>)(PaginatedResourceFactory.Create<string, ListObjectsRequest, ListObjectsResponse>(new PaginatedResourceInfo()
                .WithClient(s3Client)
                .WithMethodName("ListObjects")
                .WithRequest(request)
                .WithItemListPropertyPath("CommonPrefixes")
                .WithTokenRequestPropertyPath("Marker")
                .WithTokenResponsePropertyPath("NextMarker"))),
            prefix => new S3DirectoryInfo(s3Client, bucket, S3Helper.DecodeKey(prefix)));
    }

    // Handle if recursion is set: expand every sub directory and append its results.
    if (searchOption == SearchOption.AllDirectories)
    {
        IEnumerable<S3DirectoryInfo> foldersToAdd = new List<S3DirectoryInfo>();
        foreach (S3DirectoryInfo dir in folders)
        {
            foldersToAdd = foldersToAdd.Concat(dir.EnumerateDirectories(searchPattern, searchOption));
        }
        folders = folders.Concat(foldersToAdd);
    }

    // Filter based on search pattern (wildcards translated to a case-insensitive regex).
    var regEx = WildcardToRegex(searchPattern);
    folders = folders.Where(s3dirInfo => Regex.IsMatch(s3dirInfo.Name, regEx, RegexOptions.IgnoreCase));
    return folders;
}
/// <summary>
/// Deletes the file from S3.
/// </summary>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
public void Delete()
{
    if (Exists)
    {
        var deleteObjectRequest = new DeleteObjectRequest
        {
            BucketName = bucket,
            Key = S3Helper.EncodeKey(key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)deleteObjectRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        // GetAwaiter().GetResult() surfaces the documented AmazonS3Exception directly;
        // Wait() would wrap it in an AggregateException callers do not expect.
        s3Client.DeleteObjectAsync(deleteObjectRequest).GetAwaiter().GetResult();
        // Recreate the containing directory so it remains present after its
        // object is removed.
        Directory.Create();
    }
}
/// <summary>
/// Deletes the file from S3.
/// </summary>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
public void Delete()
{
    // Nothing to do when the object is not there.
    if (!Exists)
    {
        return;
    }

    var request = new DeleteObjectRequest
    {
        BucketName = bucket,
        Key = S3Helper.EncodeKey(key)
    };
    request.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;

    s3Client.DeleteObject(request);

    // Recreate the containing directory so it remains present after the delete.
    Directory.Create();
}
/// <summary>
/// Downloads the S3 object backing this stream into the local in-memory buffer,
/// then rewinds the buffer so callers read from the start.
/// </summary>
private void PopulateData()
{
    using (Stream data = file.S3Client.GetObject(new GetObjectRequest()
        .WithBucketName(file.BucketName)
        .WithKey(S3Helper.EncodeKey(file.ObjectKey))
        .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as GetObjectRequest).ResponseStream)
    {
        // Copy the response stream into the buffer in fixed-size chunks.
        byte[] tempBuffer = new byte[S3Constants.DefaultBufferSize];
        int bytesRead = 0;
        while ((bytesRead = data.Read(tempBuffer, 0, tempBuffer.Length)) > 0)
        {
            buffer.Write(tempBuffer, 0, bytesRead);
        }
    }
    // Position at the beginning of the freshly downloaded content.
    buffer.Position = 0;
}
/// <summary>
/// Copies this file to the location indicated by the passed in S3FileInfo.
/// If the file already exists in S3 and overwrite is set to false than an ArgumentException is thrown.
/// </summary>
/// <param name="file">The target location to copy this file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo of the newly copied file.</returns>
public S3FileInfo CopyTo(S3FileInfo file, bool overwrite)
{
    if (!overwrite)
    {
        if (file.Exists)
        {
            throw new IOException("File already exists");
        }
    }

    if (SameClient(file))
    {
        // Same client: server-side copy avoids moving bytes through this process.
        var request = new CopyObjectRequest
        {
            DestinationBucket = file.BucketName,
            DestinationKey = S3Helper.EncodeKey(file.ObjectKey),
            SourceBucket = bucket,
            SourceKey = S3Helper.EncodeKey(key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        s3Client.CopyObject(request);
    }
    else
    {
        var getObjectRequest = new GetObjectRequest
        {
            BucketName = bucket,
            Key = S3Helper.EncodeKey(key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)getObjectRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        // Dispose the whole GetObjectResponse (not just its ResponseStream) so the
        // response's resources are released even if PutObject throws.
        using (var getObjectResponse = s3Client.GetObject(getObjectRequest))
        using (Stream stream = getObjectResponse.ResponseStream)
        {
            var putObjectRequest = new PutObjectRequest
            {
                BucketName = file.BucketName,
                Key = S3Helper.EncodeKey(file.ObjectKey),
                InputStream = stream
            };
            ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)putObjectRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
            file.S3Client.PutObject(putObjectRequest);
        }
    }
    return file;
}
/// <summary>
/// Enumerate the files of this directory.
/// </summary>
/// <param name="searchPattern">The search string. The default pattern is "*", which returns all files.</param>
/// <param name="searchOption">One of the enumeration values that specifies whether the search operation should include only the current directory or all subdirectories. The default value is TopDirectoryOnly.</param>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>An enumerable collection of files that matches searchPattern and searchOption.</returns>
public IEnumerable<S3FileInfo> EnumerateFiles(string searchPattern, SearchOption searchOption)
{
    IEnumerable<S3FileInfo> files = null;
    if (String.IsNullOrEmpty(bucket))
    {
        // The account root contains buckets, never files.
        files = new List<S3FileInfo>();
    }
    else
    {
        var request = new ListObjectsRequest
        {
            BucketName = bucket,
            Delimiter = "/",
            Prefix = S3Helper.EncodeKey(key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        // Lazily page through ListObjects using Marker/NextMarker continuation tokens.
        PaginatedResourceInfo pagingInfo = new PaginatedResourceInfo().WithClient(s3Client)
            .WithMethodName("ListObjects")
            .WithRequest(request)
            .WithItemListPropertyPath("S3Objects")
            .WithTokenRequestPropertyPath("Marker")
            .WithTokenResponsePropertyPath("NextMarker");
        // Skip the object that represents this directory itself, and objects whose raw
        // key ends in "\" — NOTE(review): "\" looks like a directory-marker convention
        // from S3Helper's key encoding; confirm against S3Helper.EncodeKey/DecodeKey.
        files = new EnumerableConverter<S3Object, S3FileInfo>
            (((IEnumerable<S3Object>)(PaginatedResourceFactory.Create<S3Object, ListObjectsRequest, ListObjectsResponse>(pagingInfo)))
                .Where(s3Object => !String.Equals(S3Helper.DecodeKey(s3Object.Key), key, StringComparison.Ordinal)
                    && !s3Object.Key.EndsWith("\\", StringComparison.Ordinal)),
            s3Object => new S3FileInfo(s3Client, bucket, S3Helper.DecodeKey(s3Object.Key)));
    }

    // Handle if recursion is set: append the files of every sub directory.
    if (searchOption == SearchOption.AllDirectories)
    {
        IEnumerable<S3DirectoryInfo> foldersToSearch = EnumerateDirectories();
        foreach (S3DirectoryInfo dir in foldersToSearch)
        {
            files = files.Concat(dir.EnumerateFiles(searchPattern, searchOption));
        }
    }

    // Filter based on search pattern (wildcards translated to a case-insensitive regex).
    var regEx = WildcardToRegex(searchPattern);
    files = files.Where(s3fileInfo => Regex.IsMatch(s3fileInfo.Name, regEx, RegexOptions.IgnoreCase));
    return(files);
}
/// <summary>
/// Copies the file from the local file system to S3.
/// If the file already exists in S3 and overwrite is set to false then an IOException is thrown.
/// </summary>
/// <param name="srcFileName">Location of the file on the local file system to copy.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo where the file is copied to.</returns>
public S3FileInfo CopyFromLocal(string srcFileName, bool overwrite)
{
    if (!overwrite)
    {
        if (Exists)
        {
            throw new IOException("File already exists");
        }
    }
    // Upload directly from the local path; the SDK streams the file content.
    s3Client.PutObject(new PutObjectRequest()
        .WithBucketName(bucket)
        .WithKey(S3Helper.EncodeKey(key))
        .WithFilePath(srcFileName)
        .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as PutObjectRequest);
    return(this);
}
/// <summary>
/// Copies from S3 to the local file system.
/// If the file already exists on the local file system and overwrite is set to false then an IOException is thrown.
/// </summary>
/// <param name="destFileName">The path where to copy the file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists locally and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>FileInfo for the local file where file is copied to.</returns>
public FileInfo CopyToLocal(string destFileName, bool overwrite)
{
    if (!overwrite)
    {
        if (new FileInfo(destFileName).Exists)
        {
            throw new IOException("File already exists");
        }
    }
    // NOTE(review): the GetObjectResponse returned here is never disposed; only
    // its stream is consumed by WriteResponseStreamToFile. Consider a using block.
    s3Client.GetObject(new GetObjectRequest()
        .WithBucketName(bucket)
        .WithKey(S3Helper.EncodeKey(key))
        .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as GetObjectRequest)
        .WriteResponseStreamToFile(destFileName);
    return(new FileInfo(destFileName));
}
/// <summary>
/// Creates the directory in S3. If no object key was specified when creating the S3DirectoryInfo then the bucket will be created.
/// </summary>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
public void Create()
{
    bool bucketExists;
    if (ExistsWithBucketCheck(out bucketExists))
    {
        // Already present; nothing to create.
        return;
    }

    // Create the bucket when this S3DirectoryInfo represents the bucket itself,
    // or when the bucket that should hold the directory object is missing.
    // (Previously this logic was duplicated across both branches.)
    if (String.IsNullOrEmpty(key) || !bucketExists)
    {
        var request = new PutBucketRequest { BucketName = bucket };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        // GetAwaiter().GetResult() propagates the documented AmazonS3Exception directly
        // instead of the AggregateException that Wait() would throw.
        s3Client.PutBucketAsync(request).GetAwaiter().GetResult();
        WaitTillBucketS3StateIsConsistent(true);
    }

    if (!String.IsNullOrEmpty(key))
    {
        // The "directory" is represented by an empty object stored at the encoded key.
        var putObjectRequest = new PutObjectRequest
        {
            BucketName = bucket,
            Key = S3Helper.EncodeKey(key),
            InputStream = new MemoryStream()
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)putObjectRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        s3Client.PutObjectAsync(putObjectRequest).GetAwaiter().GetResult();
    }
}
/// <summary>
/// Copies the file from the local file system to S3.
/// If the file already exists in S3 and overwrite is set to false than an ArgumentException is thrown.
/// </summary>
/// <param name="srcFileName">Location of the file on the local file system to copy.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo where the file is copied to.</returns>
public S3FileInfo CopyFromLocal(string srcFileName, bool overwrite)
{
    // Guard: refuse to clobber an existing object unless overwrite was requested.
    if (!overwrite && Exists)
    {
        throw new IOException("File already exists");
    }

    var uploadRequest = new PutObjectRequest
    {
        BucketName = bucket,
        Key = S3Helper.EncodeKey(key),
        FilePath = srcFileName
    };
    uploadRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;

    s3Client.PutObject(uploadRequest);
    return this;
}
/// <summary>
/// Downloads the S3 object backing this stream into the local in-memory buffer,
/// then rewinds the buffer so callers read from the start.
/// </summary>
private void PopulateData()
{
    var getObjectRequest = new GetObjectRequest
    {
        BucketName = file.BucketName,
        Key = S3Helper.EncodeKey(file.ObjectKey)
    };
    getObjectRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;
    // Dispose the whole GetObjectResponse, not just its ResponseStream, so the
    // response's resources are always released.
    using (var getObjectResponse = file.S3Client.GetObject(getObjectRequest))
    using (Stream data = getObjectResponse.ResponseStream)
    {
        // Copy the response stream into the buffer in fixed-size chunks.
        byte[] tempBuffer = new byte[S3Constants.DefaultBufferSize];
        int bytesRead = 0;
        while ((bytesRead = data.Read(tempBuffer, 0, tempBuffer.Length)) > 0)
        {
            buffer.Write(tempBuffer, 0, bytesRead);
        }
    }
    // Position at the beginning of the freshly downloaded content.
    buffer.Position = 0;
}
/// <summary>
/// Copies from S3 to the local file system.
/// If the file already exists on the local file system and overwrite is set to false then an IOException is thrown.
/// </summary>
/// <param name="destFileName">The path where to copy the file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists locally and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>FileInfo for the local file where file is copied to.</returns>
public FileInfo CopyToLocal(string destFileName, bool overwrite)
{
    if (!overwrite)
    {
        if (new FileInfo(destFileName).Exists)
        {
            throw new IOException("File already exists");
        }
    }
    var getObjectRequest = new GetObjectRequest
    {
        BucketName = bucket,
        Key = S3Helper.EncodeKey(key)
    };
    getObjectRequest.BeforeRequestEvent += S3Helper.FileIORequestEventHandler;
    // Dispose the GetObjectResponse once its stream has been written to disk;
    // previously the response was never disposed at all.
    using (var getObjectResponse = s3Client.GetObject(getObjectRequest))
    {
        getObjectResponse.WriteResponseStreamToFile(destFileName);
    }
    return new FileInfo(destFileName);
}
/// <summary>
/// Downloads the S3 object backing this stream into the local in-memory buffer,
/// then rewinds the buffer so callers read from the start.
/// </summary>
private void PopulateData()
{
    var getObjectRequest = new GetObjectRequest
    {
        BucketName = file.BucketName,
        Key = S3Helper.EncodeKey(file.ObjectKey)
    };
    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)getObjectRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
    // Dispose the whole GetObjectResponse, not just its ResponseStream, so the
    // response's resources are always released.
    using (var getObjectResponse = file.S3Client.GetObject(getObjectRequest))
    using (Stream data = getObjectResponse.ResponseStream)
    {
        // Copy the response stream into the buffer in fixed-size chunks.
        byte[] tempBuffer = new byte[AWSSDKUtils.DefaultBufferSize];
        int bytesRead = 0;
        while ((bytesRead = data.Read(tempBuffer, 0, tempBuffer.Length)) > 0)
        {
            buffer.Write(tempBuffer, 0, bytesRead);
        }
    }
    // Position at the beginning of the freshly downloaded content.
    buffer.Position = 0;
}
/// <summary>
/// Copies the file from the local file system to S3.
/// If the file already exists in S3 and overwrite is set to false than an ArgumentException is thrown.
/// </summary>
/// <param name="srcFileName">Location of the file on the local file system to copy.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo where the file is copied to.</returns>
public S3FileInfo CopyFromLocal(string srcFileName, bool overwrite)
{
    // Guard: refuse to clobber an existing object unless overwrite was requested.
    if (!overwrite && Exists)
    {
        throw new IOException("File already exists");
    }

    var uploadRequest = new PutObjectRequest
    {
        BucketName = bucket,
        Key = S3Helper.EncodeKey(key),
        FilePath = srcFileName
    };
    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)uploadRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);

    s3Client.PutObject(uploadRequest);
    return this;
}
/// <summary>
/// Copies this file to the location indicated by the passed in S3FileInfo.
/// If the file already exists in S3 and overwrite is set to false then an IOException is thrown.
/// </summary>
/// <param name="file">The target location to copy this file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists in S3 and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>S3FileInfo of the newly copied file.</returns>
public S3FileInfo CopyTo(S3FileInfo file, bool overwrite)
{
    if (!overwrite)
    {
        if (file.Exists)
        {
            throw new IOException("File already exists");
        }
    }
    if (SameClient(file))
    {
        // Same client: server-side copy avoids moving bytes through this process.
        s3Client.CopyObject(new CopyObjectRequest()
            .WithDestinationBucket(file.BucketName)
            .WithDestinationKey(S3Helper.EncodeKey(file.ObjectKey))
            .WithSourceBucket(bucket)
            .WithSourceKey(S3Helper.EncodeKey(key))
            .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as CopyObjectRequest);
    }
    else
    {
        // Different clients/credentials: stream the object down and re-upload it.
        // NOTE(review): only the ResponseStream is disposed here, not the
        // GetObjectResponse that owns it; consider wrapping the response in a using.
        using (Stream stream = s3Client.GetObject(new GetObjectRequest()
            .WithBucketName(bucket)
            .WithKey(S3Helper.EncodeKey(key))
            .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler) as GetObjectRequest)
            .ResponseStream)
        {
            file.S3Client.PutObject((PutObjectRequest) new PutObjectRequest()
                .WithBucketName(file.BucketName)
                .WithKey(S3Helper.EncodeKey(file.ObjectKey))
                .WithInputStream(stream)
                .WithBeforeRequestHandler(S3Helper.FileIORequestEventHandler));
        }
    }
    return(file);
}
/// <summary>
/// Copies from S3 to the local file system.
/// If the file already exists on the local file system and overwrite is set to false than an ArgumentException is thrown.
/// </summary>
/// <param name="destFileName">The path where to copy the file to.</param>
/// <param name="overwrite">Determines whether the file can be overwritten.</param>
/// <exception cref="T:System.IO.IOException">If the file already exists locally and overwrite is set to false.</exception>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <returns>FileInfo for the local file where file is copied to.</returns>
public FileInfo CopyToLocal(string destFileName, bool overwrite)
{
    // Guard: refuse to clobber an existing local file unless overwrite was requested.
    if (!overwrite && new FileInfo(destFileName).Exists)
    {
        throw new IOException("File already exists");
    }

    var downloadRequest = new GetObjectRequest
    {
        BucketName = bucket,
        Key = S3Helper.EncodeKey(key)
    };
    ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)downloadRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);

    // Dispose the response once its content has been written to disk.
    using (var response = s3Client.GetObject(downloadRequest))
    {
        response.WriteResponseStreamToFile(destFileName);
    }

    return new FileInfo(destFileName);
}
/// <summary>
/// Checks whether this directory exists in S3, also reporting whether its bucket exists.
/// The account root always exists; a bucket-level instance exists when the bucket does;
/// otherwise the directory exists when any object lives under its key prefix.
/// </summary>
/// <param name="bucketExists">Set to false only when S3 reports "NoSuchBucket"; otherwise true.</param>
/// <returns>True if the directory exists.</returns>
internal bool ExistsWithBucketCheck(out bool bucketExists)
{
    bucketExists = true;
    try
    {
        if (String.IsNullOrEmpty(bucket))
        {
            // The account root of the S3 "file system" always exists.
            return true;
        }
        else if (String.IsNullOrEmpty(key))
        {
            // Bucket-level directory: probe the bucket's location.
            var request = new GetBucketLocationRequest() { BucketName = bucket };
            ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
            try
            {
                // GetAwaiter().GetResult() propagates AmazonS3Exception directly; Wait()
                // would wrap it in an AggregateException and skip the catch blocks below,
                // making the NoSuchBucket handling dead code.
                s3Client.GetBucketLocationAsync(request).GetAwaiter().GetResult();
                return true;
            }
            catch (AmazonS3Exception e)
            {
                if (string.Equals(e.ErrorCode, "NoSuchBucket"))
                {
                    return false;
                }
                throw;
            }
        }
        else
        {
            var request = new ListObjectsRequest()
            {
                BucketName = this.bucket,
                Prefix = S3Helper.EncodeKey(key),
                MaxKeys = 1
            };
            ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
            var response = s3Client.ListObjectsAsync(request).GetAwaiter().GetResult();
            // The directory exists if anything at all lives under its prefix.
            return response.S3Objects.Count > 0;
        }
    }
    catch (AmazonS3Exception e)
    {
        if (string.Equals(e.ErrorCode, "NoSuchBucket"))
        {
            bucketExists = false;
            return false;
        }
        else if (string.Equals(e.ErrorCode, "NotFound"))
        {
            return false;
        }
        throw;
    }
}
/// <summary>
/// Deletes all the files in this directory as well as this directory. If recursive is set to true then all sub directories will be
/// deleted as well.
/// </summary>
/// <exception cref="T:System.Net.WebException"></exception>
/// <exception cref="T:Amazon.S3.AmazonS3Exception"></exception>
/// <exception cref="T:Amazon.S3.Model.DeleteObjectsException"></exception>
/// <param name="recursive">If true then sub directories will be deleted as well.</param>
public void Delete(bool recursive)
{
    if (String.IsNullOrEmpty(bucket))
    {
        // The account root cannot be deleted.
        throw new NotSupportedException();
    }

    if (recursive)
    {
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = bucket,
            Prefix = S3Helper.EncodeKey(this.key)
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)listRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);

        DeleteObjectsRequest deleteRequest = new DeleteObjectsRequest
        {
            BucketName = bucket
        };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)deleteRequest).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);

        ListObjectsResponse listResponse = null;
        do
        {
            // GetAwaiter().GetResult() propagates the documented AmazonS3Exception /
            // DeleteObjectsException directly instead of the AggregateException that
            // Wait()/Result would surface.
            listResponse = s3Client.ListObjectsAsync(listRequest).GetAwaiter().GetResult();

            // Sort to make sure the Marker for paging is set to the last lexiographical key.
            foreach (S3Object s3o in listResponse.S3Objects.OrderBy(x => x.Key))
            {
                deleteRequest.AddKey(s3o.Key);
                if (deleteRequest.Objects.Count == MULTIPLE_OBJECT_DELETE_LIMIT)
                {
                    // Batch limit reached: flush the pending deletes and start a new batch.
                    s3Client.DeleteObjectsAsync(deleteRequest).GetAwaiter().GetResult();
                    deleteRequest.Objects.Clear();
                }
                listRequest.Marker = s3o.Key;
            }
        } while (listResponse.IsTruncated);

        if (deleteRequest.Objects.Count > 0)
        {
            // Flush the final partial batch.
            s3Client.DeleteObjectsAsync(deleteRequest).GetAwaiter().GetResult();
        }
    }

    if (String.IsNullOrEmpty(key) && Exists)
    {
        // This directory is the bucket itself: remove the bucket.
        var request = new DeleteBucketRequest { BucketName = bucket };
        ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
        s3Client.DeleteBucketAsync(request).GetAwaiter().GetResult();
        WaitTillBucketS3StateIsConsistent(false);
    }
    else
    {
        // Only remove the directory object when nothing remains underneath it.
        if (!EnumerateFileSystemInfos().GetEnumerator().MoveNext() && Exists)
        {
            var request = new DeleteObjectRequest { BucketName = bucket, Key = S3Helper.EncodeKey(key) };
            ((Amazon.Runtime.Internal.IAmazonWebServiceRequest)request).AddBeforeRequestHandler(S3Helper.FileIORequestEventHandler);
            s3Client.DeleteObjectAsync(request).GetAwaiter().GetResult();
            Parent.Create();
        }
    }
}