static void ListingObjects()
{
    try
    {
        ListObjectsRequest request = new ListObjectsRequest();
        request.BucketName = bucketName;

        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object entry in response.S3Objects)
            {
                Console.WriteLine("key = {0} size = {1}", entry.Key, entry.Size);
            }
        }

        // List only keys starting with "foo".
        request.WithPrefix("foo");
        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object entry in response.S3Objects)
            {
                Console.WriteLine("key = {0} size = {1}", entry.Key, entry.Size);
            }
        }

        // List only keys that come after "bar" alphabetically.
        request.WithPrefix(null)
               .WithMarker("bar");
        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object entry in response.S3Objects)
            {
                Console.WriteLine("key = {0} size = {1}", entry.Key, entry.Size);
            }
        }

        // List at most three keys.
        request.WithPrefix(null)
               .WithMarker(null)
               .WithMaxKeys(3);
        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object entry in response.S3Objects)
            {
                Console.WriteLine("key = {0} size = {1}", entry.Key, entry.Size);
            }
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        if (amazonS3Exception.ErrorCode != null &&
            (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") ||
             amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
        {
            Console.WriteLine("Please check the provided AWS Credentials.");
            Console.WriteLine("If you haven't signed up for Amazon S3, please visit http://aws.amazon.com/s3");
        }
        else
        {
            Console.WriteLine("An error occurred with the message '{0}' when listing objects", amazonS3Exception.Message);
        }
    }
}
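// Each ListObjects call above returns a single page of results (at most 1,000 keys). A minimal
// sketch of walking an entire bucket with IsTruncated/NextMarker, reusing only request and
// response members shown in these examples; "bucketName" and "client" are assumed to be the
// same fields used by ListingObjects above, and the method name is illustrative only.
static void ListAllObjects()
{
    ListObjectsRequest request = new ListObjectsRequest { BucketName = bucketName };
    do
    {
        ListObjectsResponse response = client.ListObjects(request);
        foreach (S3Object entry in response.S3Objects)
        {
            Console.WriteLine("key = {0} size = {1}", entry.Key, entry.Size);
        }

        // Continue from the returned marker until the listing is no longer truncated.
        request = response.IsTruncated
            ? new ListObjectsRequest { BucketName = bucketName, Marker = response.NextMarker }
            : null;
    } while (request != null);
}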
public List<object> GetFileObjects()
{
    AmazonS3 s3 = AWSClientFactory.CreateAmazonS3Client();
    List<S3Object> Files = new List<S3Object>();
    string nextMarker = "";

    do
    {
        ListObjectsRequest lor = new ListObjectsRequest()
            .WithBucketName(AmazonBucket)
            .WithPrefix(AmazonPrefix)
            .WithMarker(nextMarker);
        var response = s3.ListObjects(lor);

        if (response.IsTruncated)
        {
            nextMarker = response.NextMarker;
        }
        else
        {
            nextMarker = "";
        }

        Files.AddRange(response.S3Objects);
        System.Diagnostics.Trace.WriteLine(string.Format("Added {0} files. Total files: {1}", response.S3Objects.Count, Files.Count));
    } while (nextMarker != "");

    return Files.Cast<object>().ToList();

    //var buckets = ComputeNode.Catalogs.Values.Cast<ICatalog>().Where(c => c.CatalogName == "WikipediaData").First().Buckets;
    //var bucketMods = buckets.Select(b => b.Value.BucketMod).ToList();
    //myFiles = allFiles.Where(f => bucketMods.Contains(f.GetHashCode() % ComputeNode.GlobalBucketCount)).OrderBy(f => f.Key).ToList();
}
/// <summary>
/// Enumerate all the children from a specified bucket which match the specified prefix.
/// This is commonly used for enumerating "subfolders".
/// </summary>
/// <param name="bucket">The name of the bucket.</param>
/// <param name="prefix">The desired prefix.</param>
/// <returns>Array of child keys.</returns>
public string[] EnumerateChildren(string bucket, string prefix)
{
    var request = new ListObjectsRequest { BucketName = bucket };
    if (!string.IsNullOrEmpty(prefix))
    {
        request.Prefix = prefix;
    }

    var result = new List<string>();
    do
    {
        var response = _amazonS3.ListObjects(request);
        result.AddRange(response.S3Objects.Select(o => o.Key));

        // Fetch the next page in case the response was truncated.
        if (response.IsTruncated)
        {
            request.Marker = response.NextMarker;
        }
        else
        {
            request = null;
        }
    } while (request != null);

    return result.ToArray();
}
public override void CopyDirectory(string srcdomain, string srcdir, string newdomain, string newdir)
{
    string srckey = MakePath(srcdomain, srcdir);
    string dstkey = MakePath(newdomain, newdir);

    // List files from src.
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest { BucketName = _bucket, Prefix = srckey };
        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object s3Object in response.S3Objects)
            {
                if (QuotaController != null)
                {
                    QuotaController.QuotaUsedAdd(_modulename, newdomain, _dataList.GetData(newdomain), s3Object.Size);
                }

                client.CopyObject(new CopyObjectRequest
                {
                    SourceBucket = _bucket,
                    SourceKey = s3Object.Key,
                    DestinationBucket = _bucket,
                    DestinationKey = s3Object.Key.Replace(srckey, dstkey),
                    CannedACL = GetDomainACL(newdomain),
                    ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256
                });
            }
        }
    }
}
protected virtual long GetFolderSize(AmazonS3 client, string folderPath, string bucketName)
{
    ListObjectsRequest request = new ListObjectsRequest();
    request.WithBucketName(bucketName);
    request.WithPrefix(folderPath);

    long total = 0;
    do
    {
        ListObjectsResponse response = client.ListObjects(request);
        if (response != null && response.S3Objects != null)
        {
            total += response.S3Objects.Sum(s => s.Size);
        }

        if (response.IsTruncated)
        {
            request.Marker = response.NextMarker;
        }
        else
        {
            request = null;
        }
    } while (request != null);

    return total;
}
public override IVirtualDirectory GetDirectory(string virtualPath)
{
    if (virtualPath == null)
    {
        return null;
    }

    var dirPath = SanitizePath(virtualPath);
    if (string.IsNullOrEmpty(dirPath))
    {
        return RootDirectory;
    }

    var seekPath = dirPath[dirPath.Length - 1] != DirSep
        ? dirPath + DirSep
        : dirPath;

    var response = AmazonS3.ListObjects(new ListObjectsRequest
    {
        BucketName = BucketName,
        Prefix = seekPath,
        MaxKeys = 1,
    });

    if (response.S3Objects.Count == 0)
    {
        return null;
    }

    return new S3VirtualDirectory(this, dirPath, GetParentDirectory(dirPath));
}
/// <summary>
/// Counts the objects in a bucket that match the specified prefix.
/// </summary>
/// <param name="s3Client"></param>
/// <param name="cmap"></param>
/// <param name="clipping"></param>
/// <returns>The number of matching objects, excluding the prefix key itself, as a string.</returns>
public static string SliceNumbers(AmazonS3 s3Client, string cmap, string clipping)
{
    //ListBucketsRequest buckr = new ListBucketsRequest();
    //ListBucketsResponse response = s3Client.ListBuckets(buckr);

    ListObjectsRequest Lor = new ListObjectsRequest()
    {
        BucketName = cmap + "0",
        Prefix = clipping
        // With Delimiter set to "/" the listing would exclude "folder" keys;
        // here we only want to count the files in the bucket, not the folders.
        //Delimiter = "/"
    };

    ListObjectsResponse response1 = s3Client.ListObjects(Lor);

    if (response1.S3Objects.Count - 1 < 0)
    {
        return "0";
    }
    else
    {
        return (response1.S3Objects.Count - 1).ToString();
    }
}
public string[] ListObjects()
{
    try
    {
        using (AmazonS3 client = CreateS3Client())
        {
            if (DoesBucketExist(client) == false)
            {
                throw new StorageException(String.Format("AWS S3 bucket [{0}] does not exist.", BucketName));
            }

            using (ListObjectsResponse listObjectsResponse = client.ListObjects(new ListObjectsRequest { BucketName = BucketName }))
            {
                return listObjectsResponse.S3Objects.Select(o => o.Key).ToArray();
            }
        }
    }
    catch (AmazonS3Exception s3Ex)
    {
        throw AnAwsRelatedException(s3Ex);
    }
    catch (Exception ex)
    {
        throw new StorageException("An error occurred while processing your request.", ex);
    }
}
public bool file_exists(string folderName, string fileName)
{
    // It's allowed to have an empty folder name.
    // if (String.IsNullOrWhiteSpace(folderName)) throw new ArgumentNullException("folderName");
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }

    // Normalize the folder name so it ends with a trailing slash, then prepend it to the file name.
    folderName = string.IsNullOrEmpty(folderName)
        ? String.Empty
        : (folderName.Substring(folderName.Length - 1, 1) == "/" ? folderName : folderName + "/");
    fileName = string.Format("{0}{1}", folderName, fileName);

    var request = new ListObjectsRequest();
    request.WithBucketName(clientContext.BucketName);
    request.WithPrefix(fileName);

    using (AmazonS3 client = clientContext.create_instance())
    {
        ListObjectsResponse response = wrap_request_in_error_handler(() => client.ListObjects(request));
        var count = response.S3Objects.Count;
        if (count == 1)
        {
            return true;
        }
    }
    return false;
}
public bool FileExists(string folderName, string fileName)
{
    // Folder is ignored - packages are stored at the top level of the S3 bucket.
    if (String.IsNullOrWhiteSpace(folderName))
    {
        throw new ArgumentNullException("folderName");
    }
    if (String.IsNullOrWhiteSpace(fileName))
    {
        throw new ArgumentNullException("fileName");
    }

    var request = new ListObjectsRequest();
    request.WithBucketName(clientContext.BucketName);
    request.WithPrefix(fileName);

    using (AmazonS3 client = clientContext.CreateInstance())
    {
        ListObjectsResponse response = WrapRequestInErrorHandler(() => client.ListObjects(request));
        var count = response.S3Objects.Count;
        if (count == 1)
        {
            return true;
        }
    }
    return false;
}
public override void CopyDirectory(string srcdomain, string srcdir, string newdomain, string newdir)
{
    string srckey = MakePath(srcdomain, srcdir);
    string dstkey = MakePath(newdomain, newdir);

    // List files from src.
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest { BucketName = _bucket };
        request.WithPrefix(srckey);

        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object s3Object in response.S3Objects)
            {
                if (QuotaController != null)
                {
                    QuotaController.QuotaUsedAdd(_modulename, newdomain, _dataList.GetData(newdomain), s3Object.Size);
                }

                client.CopyObject(new CopyObjectRequest()
                    .WithSourceBucket(_bucket)
                    .WithSourceKey(s3Object.Key)
                    .WithDestinationBucket(_bucket)
                    .WithDestinationKey(s3Object.Key.Replace(srckey, dstkey))
                    .WithCannedACL(GetDomainACL(newdomain)));
            }
        }
    }
}
public override void MoveDirectory(string srcdomain, string srcdir, string newdomain, string newdir)
{
    string srckey = MakePath(srcdomain, srcdir);
    string dstkey = MakePath(newdomain, newdir);

    // List files from src, then copy each object to the destination and delete the original.
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest { BucketName = _bucket };
        request.WithPrefix(srckey);

        using (ListObjectsResponse response = client.ListObjects(request))
        {
            foreach (S3Object s3Object in response.S3Objects)
            {
                client.CopyObject(new CopyObjectRequest()
                    .WithSourceBucket(_bucket)
                    .WithSourceKey(s3Object.Key)
                    .WithDestinationBucket(_bucket)
                    .WithDestinationKey(s3Object.Key.Replace(srckey, dstkey))
                    .WithCannedACL(GetDomainACL(newdomain)));

                client.DeleteObject(new DeleteObjectRequest().WithBucketName(_bucket).WithKey(s3Object.Key));
            }
        }
    }
}
/// <summary>
/// Get Items
/// </summary>
/// <param name="container">Container</param>
/// <param name="client">Client</param>
/// <param name="path">Path</param>
/// <returns>Storage Items</returns>
private IEnumerable<IStorageItem> GetItems(CloudBlobContainer container, AmazonS3 client, string path)
{
    if (null != container)
    {
        var options = new BlobRequestOptions()
        {
            UseFlatBlobListing = true,
        };

        return container.ListBlobs(options).Select(b => new Azure(container, b.Uri.ToString())).Where(c => c.Exists());
    }
    else if (null != client)
    {
        var request = new ListObjectsRequest()
        {
            BucketName = path,
        };

        using (var response = client.ListObjects(request))
        {
            return response.S3Objects.Select(s3 => new S3(client, path, s3.Key, s3.ETag));
        }
    }
    else
    {
        return this.GetFiles(path, path, new List<IStorageItem>());
    }
}
private void GetFileListing()
{
    s3 = AWSClientFactory.CreateAmazonS3Client();
    string nextMarker = "";

    do
    {
        ListObjectsRequest lor = new ListObjectsRequest()
            .WithBucketName(BUCKET)
            .WithPrefix(PREFIX)
            .WithMarker(nextMarker);
        var response = s3.ListObjects(lor);

        if (response.IsTruncated)
        {
            nextMarker = response.NextMarker;
        }
        else
        {
            nextMarker = "";
        }

        allFiles.AddRange(response.S3Objects);
        Trace.WriteLine(string.Format("Added {0} files. Total files: {1}", response.S3Objects.Count, allFiles.Count));
    } while (nextMarker != "");

    var buckets = ComputeNode.Catalogs.Values.Cast<ICatalog>().Where(c => c.CatalogName == "WikipediaData").First().Buckets;
    var bucketMods = buckets.Select(b => b.Value.BucketMod).ToList();
    myFiles = allFiles.Where(f => bucketMods.Contains(f.GetHashCode() % ComputeNode.GlobalBucketCount)).OrderBy(f => f.Key).ToList();
}
public void TestCleanup()
{
    try
    {
        var objRequest = new ListObjectsRequest() { BucketName = this.bucketName };
        using (var objResponse = client.ListObjects(objRequest))
        {
            var delRequest = new DeleteObjectsRequest() { BucketName = this.bucketName, Quiet = true };
            delRequest.AddKeys(objResponse.S3Objects.Select(o => new KeyVersion(o.Key)).ToArray());
            using (var delResponse = client.DeleteObjects(delRequest))
            {
            }
        }

        var deleteRequest = new DeleteBucketRequest() { BucketName = this.bucketName };
        using (var deleteResponse = client.DeleteBucket(deleteRequest))
        {
        }
    }
    catch (Exception ex)
    {
        this.TestContext.WriteLine("Warning: Could not cleanup bucket: {0}. {1}", this.bucketName, ex);
    }
}
private bool DoesObjectExist(AmazonS3 client, string objectName)
{
    // Note: this lists the bucket (first page only, up to 1,000 keys) and scans for the key client-side.
    using (ListObjectsResponse listObjectsResponse = client.ListObjects(new ListObjectsRequest { BucketName = BucketName }))
    {
        return listObjectsResponse.S3Objects.Any(o => o.Key == objectName);
    }
}
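// A lighter-weight sketch of the same check, assuming the same BucketName property and an
// AmazonS3 client as above: ask S3 for at most one key using the object's name as the prefix
// and compare the returned key exactly, so the whole bucket never has to be listed. It reuses
// only request members already shown in these examples; the method name is illustrative.
private bool DoesObjectExistByPrefix(AmazonS3 client, string objectName)
{
    var request = new ListObjectsRequest()
        .WithBucketName(BucketName)
        .WithPrefix(objectName)
        .WithMaxKeys(1);

    using (ListObjectsResponse response = client.ListObjects(request))
    {
        // The first key sharing the prefix is returned; it still has to match exactly.
        return response.S3Objects.Count == 1 && response.S3Objects[0].Key == objectName;
    }
}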
/// <summary>
/// Sets the server side encryption method for the S3 Object's Version to the value
/// specified.
/// </summary>
/// <param name="bucketName">The name of the bucket in which the key is stored</param>
/// <param name="key">The key of the S3 Object</param>
/// <param name="version">The version of the S3 Object</param>
/// <param name="method">The server side encryption method</param>
/// <param name="s3Client">The Amazon S3 Client to use for S3 specific operations.</param>
/// <seealso cref="T:Amazon.S3.Model.S3StorageClass"/>
public static void SetServerSideEncryption(string bucketName, string key, string version, ServerSideEncryptionMethod method, AmazonS3 s3Client)
{
    if (null == s3Client)
    {
        throw new ArgumentNullException("s3Client", "Please specify an S3 Client to make service requests.");
    }

    // Get the existing ACL of the object
    GetACLRequest getACLRequest = new GetACLRequest();
    getACLRequest.BucketName = bucketName;
    getACLRequest.Key = key;
    if (version != null)
    {
        getACLRequest.VersionId = version;
    }
    GetACLResponse getACLResponse = s3Client.GetACL(getACLRequest);

    ListObjectsResponse listObjectResponse = s3Client.ListObjects(new ListObjectsRequest()
        .WithBucketName(bucketName)
        .WithPrefix(key)
        .WithMaxKeys(1));
    if (listObjectResponse.S3Objects.Count != 1)
    {
        throw new ArgumentNullException("No object exists with this bucket name and key.");
    }

    // Set the storage class on the object
    CopyObjectRequest copyRequest = new CopyObjectRequest();
    copyRequest.SourceBucket = copyRequest.DestinationBucket = bucketName;
    copyRequest.SourceKey = copyRequest.DestinationKey = key;
    copyRequest.StorageClass = listObjectResponse.S3Objects[0].StorageClass == "STANDARD" ?
        S3StorageClass.Standard : S3StorageClass.ReducedRedundancy;
    if (version != null)
    {
        copyRequest.SourceVersionId = version;
    }
    copyRequest.ServerSideEncryptionMethod = method;

    // The copyRequest's Metadata directive is COPY by default
    CopyObjectResponse copyResponse = s3Client.CopyObject(copyRequest);

    // Set the object's original ACL back onto it because a COPY
    // operation resets the ACL on the destination object.
    SetACLRequest setACLRequest = new SetACLRequest();
    setACLRequest.BucketName = bucketName;
    setACLRequest.Key = key;
    if (version != null)
    {
        setACLRequest.VersionId = copyResponse.VersionId;
    }
    setACLRequest.ACL = getACLResponse.AccessControlList;

    s3Client.SetACL(setACLRequest);
}
/// <summary>
/// Sets up the request needed to make an exact copy of the object leaving the parent method
/// the ability to change just the attribute being requested to change.
/// </summary>
/// <param name="bucketName"></param>
/// <param name="key"></param>
/// <param name="version"></param>
/// <param name="s3Client"></param>
/// <param name="copyRequest"></param>
/// <param name="setACLRequest"></param>
static void SetupForObjectModification(string bucketName, string key, string version,
    AmazonS3 s3Client, out CopyObjectRequest copyRequest, out SetACLRequest setACLRequest)
{
    // Get the existing ACL of the object
    GetACLRequest getACLRequest = new GetACLRequest();
    getACLRequest.BucketName = bucketName;
    getACLRequest.Key = key;
    if (version != null)
    {
        getACLRequest.VersionId = version;
    }
    GetACLResponse getACLResponse = s3Client.GetACL(getACLRequest);

    // Set the object's original ACL back onto it because a COPY
    // operation resets the ACL on the destination object.
    setACLRequest = new SetACLRequest();
    setACLRequest.BucketName = bucketName;
    setACLRequest.Key = key;
    setACLRequest.ACL = getACLResponse.AccessControlList;

    ListObjectsResponse listObjectResponse = s3Client.ListObjects(new ListObjectsRequest()
        .WithBucketName(bucketName)
        .WithPrefix(key)
        .WithMaxKeys(1));
    if (listObjectResponse.S3Objects.Count != 1)
    {
        throw new ArgumentNullException("No object exists with this bucket name and key.");
    }

    GetObjectMetadataRequest getMetaRequest = new GetObjectMetadataRequest()
    {
        BucketName = bucketName,
        Key = key
    };
    GetObjectMetadataResponse getMetaResponse = s3Client.GetObjectMetadata(getMetaRequest);

    // Set the storage class on the object
    copyRequest = new CopyObjectRequest();
    copyRequest.SourceBucket = copyRequest.DestinationBucket = bucketName;
    copyRequest.SourceKey = copyRequest.DestinationKey = key;
    copyRequest.StorageClass = listObjectResponse.S3Objects[0].StorageClass == "STANDARD" ?
        S3StorageClass.Standard : S3StorageClass.ReducedRedundancy;
    if (version != null)
    {
        copyRequest.SourceVersionId = version;
    }
    copyRequest.WebsiteRedirectLocation = getMetaResponse.WebsiteRedirectLocation;
    copyRequest.ServerSideEncryptionMethod = getMetaResponse.ServerSideEncryptionMethod;
}
public override bool IsFile(string domain, string path)
{
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest { BucketName = _bucket };
        request.Prefix = MakePath(domain, path);

        using (ListObjectsResponse response = client.ListObjects(request))
        {
            return response.S3Objects.Count > 0;
        }
    }
}
private static void ExportAndImport(string folder, CloudBlobContainer container, AmazonS3 s3)
{
    var listRequest = new ListObjectsRequest
    {
        BucketName = ConfigurationManager.AppSettings["S3Bucket"],
    }.WithPrefix(folder);

    Console.WriteLine("Fetching all S3 objects in " + folder);
    var s3response = s3.ListObjects(listRequest);

    // Check whether the container exists, and create it if not.
    if (container.CreateIfNotExists())
    {
        Console.WriteLine("Creating the blob container");
    }

    foreach (var s3Item in s3response.S3Objects)
    {
        if (s3Item.Key == folder)
        {
            continue;
        }
        if (s3Item.Key.EndsWith("/"))
        {
            // Recurse into "subfolders".
            ExportAndImport(s3Item.Key, container, s3);
            continue;
        }

        Console.WriteLine("---------------------------------------------------");
        var blockBlob = container.GetBlockBlobReference(s3Item.Key);
        Console.WriteLine("Blob: " + blockBlob.Uri.AbsoluteUri);

        var id = blockBlob.StartCopyFromBlob(new Uri("http://" + awsServiceUrl + "/" + s3Bucket + "/" + HttpUtility.UrlEncode(s3Item.Key)), null, null, null);

        bool continueLoop = true;
        while (continueLoop && id == string.Empty)
        {
            var copyState = blockBlob.CopyState;
            if (copyState != null)
            {
                // Use floating-point division so the progress percentage is not truncated to zero.
                var percentComplete = (copyState.BytesCopied ?? 0) / (double)(copyState.TotalBytes ?? 1);
                Console.WriteLine("Status of blob copy: " + copyState.Status + ", " +
                    copyState.BytesCopied + " of " + copyState.TotalBytes + " bytes copied. " +
                    string.Format("{0:0.0%}", percentComplete));
                if (copyState.Status != CopyStatus.Pending)
                {
                    continueLoop = false;
                }
            }
            System.Threading.Thread.Sleep(1000);
        }
    }
}
/// <summary>
/// Deletes all keys in a given bucket, then deletes the bucket itself.
/// </summary>
/// <param name="client">The client to use.</param>
/// <param name="bucketName">The bucket to delete.</param>
public static void DeleteBucketRecursive(this AmazonS3 client, string bucketName)
{
    while (true)
    {
        // Attempt to delete the bucket.
        try
        {
            var deleteRequest = new DeleteBucketRequest() { BucketName = bucketName };
            using (var deleteResponse = client.DeleteBucket(deleteRequest))
            {
                // Deletion was successful.
                return;
            }
        }
        catch (AmazonS3Exception ex)
        {
            if (ex.ErrorCode != "BucketNotEmpty")
            {
                throw;
            }
        }

        // The bucket still contains objects, so delete a batch of them and try again.
        var objRequest = new ListObjectsRequest() { BucketName = bucketName };
        using (var objResponse = client.ListObjects(objRequest))
        {
            var delRequest = new DeleteObjectsRequest() { BucketName = bucketName, Quiet = true };

            // Select the objects to delete (up to the supported limit of 1000).
            var objToDelete = objResponse.S3Objects.Take(1000).Select(o => new KeyVersion(o.Key));
            delRequest.AddKeys(objToDelete.ToArray());

            using (var delResponse = client.DeleteObjects(delRequest))
            {
            }
        }
    }
}
public static void CleanBucket(string bucket, string aws_id, string aws_secret)
{
    // Set up the client.
    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(aws_id, aws_secret))
    {
        // Check to ensure that the bucket actually exists.
        var dirinfo = new Amazon.S3.IO.S3DirectoryInfo(client, bucket);
        if (dirinfo.Exists)
        {
            Console.WriteLine("Bucket \"{0}\" already exists. Erasing. Sorry if this isn't what you wanted, dude.", bucket);

            // Get a list of the bucket's objects.
            var lor = new ListObjectsRequest { BucketName = bucket };
            using (ListObjectsResponse r = client.ListObjects(lor))
            {
                if (r.S3Objects.Count > 0)
                {
                    List<KeyVersion> objects = r.S3Objects.Select(obj => new KeyVersion(obj.Key)).ToList();

                    // Batch-delete all the objects in the bucket.
                    DeleteObjectsRequest dor = new DeleteObjectsRequest
                    {
                        BucketName = bucket,
                        Keys = objects
                    };
                    client.DeleteObjects(dor);
                }
            }
        }
        else
        {
            Console.WriteLine("Creating new bucket \"{0}\"", bucket);

            // Bucket doesn't exist; make a new one.
            PutBucketRequest pbr = new PutBucketRequest { BucketName = bucket };
            client.PutBucket(pbr);
        }
    }
}
private long QuotaDelete(string domain, AmazonS3 client, string key)
{
    if (QuotaController != null)
    {
        using (ListObjectsResponse response = client.ListObjects(new ListObjectsRequest().WithBucketName(_bucket).WithPrefix(key)))
        {
            if (response.S3Objects != null && response.S3Objects.Count > 0)
            {
                long size = Convert.ToInt64(response.S3Objects[0].Size);
                QuotaController.QuotaUsedDelete(_modulename, domain, _dataList.GetData(domain), size);
                return size;
            }
        }
    }
    return 0;
}
public override long GetFileSize(string domain, string path)
{
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest { BucketName = _bucket };
        request.Prefix = MakePath(domain, path);

        using (ListObjectsResponse response = client.ListObjects(request))
        {
            if (response.S3Objects.Count > 0)
            {
                return response.S3Objects[0].Size;
            }
            throw new FileNotFoundException("file not found", path);
        }
    }
}
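// The prefix listing above accepts the first key that merely starts with the requested path
// (for example, a lookup for "file.txt" could be satisfied by "file.txt.bak"). A minimal sketch
// of a stricter variant, assuming the same _bucket field, GetClient() and MakePath() members as
// the method above; the method name is illustrative only.
public long GetFileSizeExact(string domain, string path)
{
    using (AmazonS3 client = GetClient())
    {
        var key = MakePath(domain, path);
        var request = new ListObjectsRequest { BucketName = _bucket, Prefix = key, MaxKeys = 1 };

        using (ListObjectsResponse response = client.ListObjects(request))
        {
            // Only accept an exact key match, not just any key sharing the prefix.
            if (response.S3Objects.Count > 0 && response.S3Objects[0].Key == key)
            {
                return response.S3Objects[0].Size;
            }
            throw new FileNotFoundException("file not found", path);
        }
    }
}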
public IEnumerable<SimpleFile> GetSimulationInputFiles()
{
    // S3:URL in description
    string bucketName = "Simulations";
    string key = job.SimulationId.ToString();
    //string dest = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "name.bin");

    IAWSContext awsCtx = Turbine.Consumer.AWS.AppUtility.GetContext();
    byte[] bytes;

    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(awsCtx.AccessKey, awsCtx.SecretKey))
    {
        ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
            .WithBucketName(bucketName)
            .WithDelimiter("/")
            .WithPrefix(String.Format("/{0}/StagedInputFiles/", key));

        using (ListObjectsResponse listObjectsResponse = client.ListObjects(listObjectsRequest))
        {
            foreach (S3Object obj in listObjectsResponse.S3Objects)
            {
                GetObjectRequest getObjectRequest = new GetObjectRequest()
                    .WithBucketName(bucketName)
                    .WithKey(String.Format("/{0}/StagedInputFiles/{1}", key, obj.Key));

                using (S3Response getObjectResponse = client.GetObject(getObjectRequest))
                {
                    using (System.IO.Stream s = getObjectResponse.ResponseStream)
                    {
                        using (var ms = new System.IO.MemoryStream())
                        {
                            s.CopyTo(ms);
                            bytes = ms.ToArray();
                        }
                    }
                }

                var f = new SimpleFile() { content = bytes, name = obj.Key };
                yield return f;
            }
        }
    }
}
private static List<S3Object> GetS3Objects(AmazonS3 client, string bucket, string prefix)
{
    var request = new ListObjectsRequest().WithBucketName(bucket).WithPrefix(prefix);
    request.WithMaxKeys(1000);

    var objects = new List<S3Object>();
    ListObjectsResponse response = null;
    do
    {
        response = client.ListObjects(request);
        response.S3Objects.ForEach(entry => objects.Add(entry));
        if (objects.Count == 0)
        {
            return objects;
        }

        request.Marker = objects[objects.Count - 1].Key;
    } while (response.IsTruncated);

    return objects;
}
public ActionResult Index()
{
    // Get all objects inside a given folder.
    Dictionary<string, double> images = null;

    var request = new ListObjectsRequest();
    request.BucketName = AWSBucket;
    request.WithPrefix(AWSFolder);

    using (AmazonS3 client = Amazon.AWSClientFactory.CreateAmazonS3Client(AWSAccessKey, AWSSecretKey))
    {
        using (ListObjectsResponse response = client.ListObjects(request))
        {
            images = response.S3Objects
                .Where(x => x.Key != AWSFolder)
                .ToDictionary(obj => obj.Key, obj => AppHelper.ConvertBytesToMegabytes(obj.Size));
        }
    }

    return View(images);
}
private static void ListObjects(AmazonS3 s3Client, string bucket)
{
    var request = new ListObjectsRequest();
    request.WithBucketName(bucket)
           .WithPrefix("key")
           .WithMaxKeys(4);

    do
    {
        ListObjectsResponse response = s3Client.ListObjects(request);

        if (response.IsTruncated)
        {
            request.Marker = response.NextMarker;
        }
        else
        {
            request = null;
        }
    } while (request != null);
}
private IEnumerable<S3Object> GetS3Objects(string domain, string path)
{
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest { BucketName = _bucket };
        request.WithPrefix(MakePath(domain, path.TrimEnd('/') + '/')).WithMaxKeys(1000);

        var objects = new List<S3Object>();
        ListObjectsResponse response;
        do
        {
            response = client.ListObjects(request);
            objects.AddRange(response.S3Objects.Where(entry => CheckKey(domain, entry.Key)));

            // Advance the marker from the raw page rather than the filtered list, so a page
            // with no matching keys cannot cause the same page to be requested again.
            if (response.S3Objects.Count > 0)
            {
                request.Marker = response.S3Objects[response.S3Objects.Count - 1].Key;
            }
        } while (response.IsTruncated);

        return objects;
    }
}
private IEnumerable<S3Object> GetS3Objects(string domain, string path)
{
    using (AmazonS3 client = GetClient())
    {
        var request = new ListObjectsRequest
        {
            BucketName = _bucket,
            Prefix = MakePath(domain, path.TrimEnd('/') + '/'),
            MaxKeys = 1000
        };

        var objects = new List<S3Object>();
        ListObjectsResponse response;
        do
        {
            response = client.ListObjects(request);
            objects.AddRange(response.S3Objects.Where(entry => CheckKey(domain, entry.Key)));
            request.Marker = response.NextMarker;
        } while (response.IsTruncated);

        return objects;
    }
}
public IEnumerable<S3VirtualFile> EnumerateFiles(string prefix = null)
{
    var response = AmazonS3.ListObjects(new ListObjectsRequest
    {
        BucketName = BucketName,
        Prefix = prefix,
    });

    foreach (var file in response.S3Objects)
    {
        var filePath = SanitizePath(file.Key);
        var dirPath = GetDirPath(filePath);

        yield return new S3VirtualFile(this, new S3VirtualDirectory(this, dirPath, GetParentDirectory(dirPath)))
        {
            FilePath = filePath,
            ContentLength = file.Size,
            FileLastModified = file.LastModified,
            Etag = file.ETag,
        };
    }
}
private void FindKeys(string BucketName, DeleteObjectsRequest deleteRequest, string SearchString, AmazonS3 Client)
{
    ListObjectsRequest request = new ListObjectsRequest { BucketName = BucketName };

    using (Client)
    {
        do
        {
            ListObjectsResponse response = Client.ListObjects(request);
            foreach (S3Object entry in response.S3Objects)
            {
                if (entry.Key.Contains(SearchString))
                {
                    Project.Log(Level.Info, "Deleting file: {0}", entry.Key);
                    deleteRequest.AddKey(entry.Key, null);
                    numKeys++;
                }
            }

            // If the response is truncated, set the marker to get the next set of keys.
            if (response.IsTruncated)
            {
                request.Marker = response.NextMarker;
            }
            else
            {
                request = null;
            }
        } while (request != null);
    }
}
private void deleteFile(bool hasFile, string fileType, int id)
{
    if (hasFile)
    {
        DeleteObjectRequest request = new DeleteObjectRequest();
        request.WithBucketName("intelrecruiter");

        ListObjectsRequest listObjReq = new ListObjectsRequest();
        listObjReq.WithBucketName("intelrecruiter")
                  .WithPrefix(fileType + "/" + id.ToString());

        // NOTE: the hard-coded credentials are kept from the original sample; in practice they
        // should come from configuration rather than source code.
        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client("AKIAJ47VSG7WMA62WLCA", "3tqlHujlftpk6j/z5OtDw2eg9N2FJtz1RwL8bEa3"))
        {
            var results = client.ListObjects(listObjReq).S3Objects;
            foreach (var obj in results)
            {
                request.Key = obj.Key;
                client.DeleteObject(request);
            }
        }
    }
}
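// A minimal sketch of the same prefix-based deletion without embedding credentials in source,
// assuming the keys are supplied through the application's configuration so that the
// parameterless AWSClientFactory overload (used in other examples above) can pick them up.
// The method name is illustrative only.
private void DeleteFilesByPrefix(string fileType, int id)
{
    var listObjReq = new ListObjectsRequest();
    listObjReq.WithBucketName("intelrecruiter")
              .WithPrefix(fileType + "/" + id.ToString());

    using (AmazonS3 s3Client = Amazon.AWSClientFactory.CreateAmazonS3Client())
    {
        foreach (var obj in s3Client.ListObjects(listObjReq).S3Objects)
        {
            var request = new DeleteObjectRequest();
            request.WithBucketName("intelrecruiter");
            request.Key = obj.Key;
            s3Client.DeleteObject(request);
        }
    }
}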