/// <summary>
/// Copies every object under the source "directory" prefix to the corresponding
/// key under the destination prefix, applying the destination domain's ACL and
/// charging the copied bytes against the destination domain's quota.
/// </summary>
public override void CopyDirectory(string srcdomain, string srcdir, string newdomain, string newdir)
{
    var srckey = MakePath(srcdomain, srcdir);
    var dstkey = MakePath(newdomain, newdir);

    using var storage = GetStorage();

    // List everything under the source prefix.
    var objects = storage.ListObjects(_bucket, srckey);
    foreach (var obj in objects)
    {
        // BUG FIX: the original copied srckey -> dstkey (the same pair) on every
        // iteration, i.e. it never copied the individual listed objects. Map each
        // object's name onto the destination prefix instead.
        var dstName = dstkey + obj.Name.Substring(srckey.Length);
        storage.CopyObject(_bucket, obj.Name, _bucket, dstName, new CopyObjectOptions
        {
            DestinationPredefinedAcl = GetDomainACL(newdomain)
        });
        QuotaUsedAdd(newdomain, Convert.ToInt64(obj.Size));
    }
}
/// <summary>
/// Lists the objects in <paramref name="bucket"/> via the native uplink library.
/// The SWIG options object is retained in <c>_listOptions</c> (presumably to keep
/// it alive for the native call — TODO confirm lifetime requirement), the iterator
/// is obtained off the thread pool, its error state is checked first, and then
/// each native item is converted to a managed <c>Object</c>.
/// </summary>
/// <param name="bucket">Bucket whose objects are listed.</param>
/// <param name="listObjectsOptions">Managed listing options, converted via ToSWIG().</param>
/// <returns>An <c>ObjectList</c> with one managed item per native iterator entry.</returns>
/// <exception cref="BucketListException">Thrown when the native iterator reports an error message.</exception>
public async Task <ObjectList> ListObjectsAsync(Bucket bucket, ListObjectsOptions listObjectsOptions) { var listObjectsOptionsSWIG = listObjectsOptions.ToSWIG(); _listOptions.Add(listObjectsOptionsSWIG); using (SWIG.UplinkObjectIterator objectIterator = await Task.Run(() => SWIG.storj_uplink.uplink_list_objects(_access._project, bucket.Name, listObjectsOptionsSWIG)).ConfigureAwait(false)) { using (SWIG.UplinkError error = SWIG.storj_uplink.uplink_object_iterator_err(objectIterator)) { if (error != null && !string.IsNullOrEmpty(error.message)) { throw new BucketListException(error.message); } } ObjectList objectList = new ObjectList(); while (SWIG.storj_uplink.uplink_object_iterator_next(objectIterator)) { using (var objectResult = SWIG.storj_uplink.uplink_object_iterator_item(objectIterator)) { objectList.Items.Add(uplink.NET.Models.Object.FromSWIG(objectResult, true)); } } return(objectList); } }
public void ModifyRequest_AllOptions()
{
    // Populate every supported option and verify each one is copied onto the request.
    var listRequest = new ListRequest(null, "bucket");
    var allOptions = new ListObjectsOptions
    {
        PageSize = 10,
        Delimiter = "/",
        IncludeTrailingDelimiter = true,
        Projection = Projection.Full,
        Versions = true,
        UserProject = "proj",
        PageToken = "nextpage",
        Fields = "items(name),nextPageToken"
    };

    allOptions.ModifyRequest(listRequest);

    Assert.Equal(10, listRequest.MaxResults);
    Assert.Equal("/", listRequest.Delimiter);
    Assert.True(listRequest.IncludeTrailingDelimiter);
    Assert.Equal(ProjectionEnum.Full, listRequest.Projection);
    Assert.True(listRequest.Versions);
    Assert.Equal("proj", listRequest.UserProject);
    Assert.Equal("nextpage", listRequest.PageToken);
    Assert.Equal("items(name),nextPageToken", listRequest.Fields);
}
public async Task AllObjects(int? pageSize)
{
    // Whatever the page size (including none), the listing must yield every object.
    var listOptions = new ListObjectsOptions { PageSize = pageSize };
    await AssertObjects(null, listOptions, s_allObjectNames);
}
public async Task AllObjects(int? pageSize)
{
    // Whatever the page size (including none), the listing must yield every fixture object.
    var listOptions = new ListObjectsOptions { PageSize = pageSize };
    await AssertObjects(null, listOptions, _fixture.ReadBucketObjects.ToArray());
}
public async Task PrefixAndDelimiter(string prefix, string expectedNames)
{
    // expectedNames is a comma-separated list; empty entries are ignored.
    var expected = expectedNames.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
    var dirOptions = new ListObjectsOptions { Delimiter = "/" };
    await AssertObjects(prefix, dirOptions, expected);
}
public Task <List <Object> > ListObjectsAsync(string prefix = null, string delimiter = null)
{
    // Options are only forwarded when a delimiter was actually supplied;
    // otherwise the client is called with null options, exactly as before.
    var delimiterOptions = new ListObjectsOptions { Delimiter = delimiter };
    var effectiveOptions = delimiter == null ? null : delimiterOptions;
    return _client.ListObjectsAsync(_bucketName, prefix, options: effectiveOptions).ToList(CancellationToken);
}
private async Task AssertObjects(string prefix, ListObjectsOptions options, params string[] expectedNames)
{
    // The synchronous listing must match the expected names...
    var listed = s_config.Client.ListObjects(s_bucket, prefix, options);
    AssertObjectNames(listed, expectedNames);

    // ...and the asynchronous listing must agree with it.
    listed = await s_config.Client.ListAllObjectsAsync(s_bucket, prefix, options, CancellationToken.None);
    AssertObjectNames(listed, expectedNames);
}
private async Task AssertObjects(string prefix, ListObjectsOptions options, params string[] expectedNames)
{
    // Synchronous listing first.
    IEnumerable <Object> listed = _fixture.Client.ListObjects(_fixture.ReadBucket, prefix, options);
    AssertObjectNames(listed, expectedNames);

    // The asynchronous listing must produce the same set of names.
    listed = await _fixture.Client.ListObjectsAsync(_fixture.ReadBucket, prefix, options).ToListAsync();
    AssertObjectNames(listed, expectedNames);
}
public override PagedAsyncEnumerable <Objects, Object> ListObjectsAsync(
    string bucket, string prefix = null, ListObjectsOptions options = null)
{
    // Serve the whole result set as a single fake "page".
    var singlePage = ListObjectsInternal(bucket, prefix, options);
    return new FakePagedAsyncEnumerable <Objects, Object>(new[] { singlePage }, os => os.Items);
}
/// <summary>
/// Initializes the view model for the current bucket: re-attaches any
/// still-running (not Completed) upload and download operations tracked for
/// <c>BucketName</c>, then lists the bucket's objects and adds one
/// <c>BucketEntryViewModel</c> per object. Listing failures are reported to the
/// user via a message dialog rather than thrown. Loading indicators are toggled
/// via StartLoading()/DoneLoading().
/// </summary>
public async Task InitAsync() { StartLoading(); //Fetch all UploadOperations var uploadOperations = (ActiveUploadOperations.Where(u => u.Key == BucketName)).FirstOrDefault(); if (uploadOperations.Value != null) { foreach (var uploadOperation in uploadOperations.Value) { if (!uploadOperation.Completed) { AddUploadOperation(uploadOperation); } } } //Fetch all DownloadOperations var downloadOperations = (ActiveDownloadOperations.Where(u => u.Key == BucketName)).FirstOrDefault(); if (downloadOperations.Value != null) { foreach (var downloadOperation in downloadOperations.Value) { if (!downloadOperation.Completed) { AddDownloadOperation(downloadOperation); } } } //Load all objects try { var bucket = await _bucketService.GetBucketAsync(BucketName); var listOptions = new ListObjectsOptions(); var objects = await _objectService.ListObjectsAsync(bucket, listOptions); foreach (var obj in objects.Items) { var entry = new BucketEntryViewModel(this, _bucketService, _objectService); entry.IsObject = true; entry.ObjectInfo = obj; Entries.Add(entry); } } catch (Exception ex) { Windows.UI.Popups.MessageDialog dialog = new Windows.UI.Popups.MessageDialog("Could not open bucket - " + ex.Message); await dialog.ShowAsync(); } DoneLoading(); }
public void ModifyRequest_DefaultOptions()
{
    // Default-constructed options must leave every request property unset.
    var listRequest = new ListRequest(null, "bucket");
    var defaults = new ListObjectsOptions();

    defaults.ModifyRequest(listRequest);

    Assert.Null(listRequest.Delimiter);
    Assert.Null(listRequest.Projection);
    Assert.Null(listRequest.MaxResults);
    Assert.Null(listRequest.Versions);
}
//Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS",@"D:\CURRENT_PROJECT\VS2019\OMCatFireStore\OMCatFireStore\OMCat20-d5f2be34173e.json");
#endregion

#region Helper
/// <summary>
/// Lists objects under <paramref name="prefix"/> in the given bucket and binds
/// them to the grid (<c>dgv</c>) as <see cref="StorageModel"/> rows.
/// </summary>
/// <param name="bucketName">Bucket to list.</param>
/// <param name="prefix">Object-name prefix filter (may be null/empty).</param>
/// <param name="delimeter">Delimiter for "directory mode" listing.</param>
private void ListObject(string bucketName, string prefix, string delimeter)
{
    List <StorageModel> storageModels = new List <StorageModel>();

    // instantiates a client
    var storage = StorageClient.Create();
    var option = new ListObjectsOptions() { Delimiter = delimeter };

    // BUG FIX: GetBucket is a network round trip and is loop-invariant — the
    // original fetched the bucket once per listed object; fetch it once up front.
    var bucket = storage.GetBucket(bucketName);

    foreach (var storageObject in storage.ListObjects(bucketName, prefix, option))
    {
        storageModels.Add(new StorageModel()
        {
            Id = storageObject.Id,
            Bucket = storageObject.Bucket,
            Name = storageObject.Name,
            Link = storageObject.SelfLink,
            // NOTE(review): MediaLink is filled from the *bucket's* Website config,
            // not the object's MediaLink — looks suspicious, preserved as-is.
            MediaLink = bucket.Website.ToString(),
            Kms = storageObject.KmsKeyName
        });
    }

    dgv.DataSource = storageModels;
}
public List <Google.Apis.Storage.v1.Data.Object> GetBlobs(string bucketName, string prefix, string delimiter)
{
    // Materialize the lazy listing into a concrete list for the caller.
    var listOptions = new ListObjectsOptions() { Delimiter = delimiter };
    var blobs = new List <Google.Apis.Storage.v1.Data.Object>();
    blobs.AddRange(this.storage.ListObjects(bucketName, prefix, listOptions));
    return blobs;
}
public void ListObjects(string bucketName, string prefix, string delimiter)
{
    // Print the name of every object under the prefix, one per line.
    var client = StorageClient.Create();
    var listOptions = new ListObjectsOptions() { Delimiter = delimiter };
    var storageObjects = client.ListObjects(bucketName, prefix, listOptions);
    foreach (var storageObject in storageObjects)
    {
        Console.WriteLine(storageObject.Name);
    }
}
public async Task <List <Object> > ListObjectsAsync(string prefix = null, string delimiter = null)
{
    // Options are only forwarded when a delimiter was actually supplied;
    // a null delimiter means null options, exactly as before.
    var delimiterOptions = new ListObjectsOptions { Delimiter = delimiter };
    var effectiveOptions = delimiter == null ? null : delimiterOptions;

    var collected = new List <Object>();
    await foreach (var item in _client.ListObjectsAsync(_bucketName, prefix, options: effectiveOptions).WithCancellation(CancellationToken))
    {
        collected.Add(item);
    }
    return collected;
}
public void ModifyRequest_DefaultOptions()
{
    // Default-constructed options must leave every request property unset.
    var listRequest = new ListRequest(null, "bucket");
    var defaults = new ListObjectsOptions();

    defaults.ModifyRequest(listRequest);

    Assert.Null(listRequest.Delimiter);
    Assert.Null(listRequest.IncludeTrailingDelimiter);
    Assert.Null(listRequest.Projection);
    Assert.Null(listRequest.MaxResults);
    Assert.Null(listRequest.Versions);
    Assert.Null(listRequest.UserProject);
    Assert.Null(listRequest.PageToken);
    Assert.Null(listRequest.StartOffset);
    Assert.Null(listRequest.EndOffset);
}
public void ModifyRequest_AllOptions()
{
    // Set every supported option and verify each is propagated onto the request.
    var listRequest = new ListRequest(null, "bucket");
    var allOptions = new ListObjectsOptions
    {
        PageSize = 10,
        Delimiter = "/",
        Projection = Projection.Full,
        Versions = true
    };

    allOptions.ModifyRequest(listRequest);

    Assert.Equal(10, listRequest.MaxResults);
    Assert.Equal("/", listRequest.Delimiter);
    Assert.Equal(ProjectionEnum.Full, listRequest.Projection);
    Assert.True(listRequest.Versions);
}
public void PartialResponses()
{
    // Ask for only name + contentType (plus the paging token) via a partial response.
    var partialOptions = new ListObjectsOptions { Fields = "items(name,contentType),nextPageToken" };
    var listed = _fixture.Client.ListObjects(_fixture.ReadBucket, options: partialOptions).ToList();

    foreach (var item in listed)
    {
        // The requested fields are populated...
        Assert.NotNull(item.Name);
        Assert.NotNull(item.ContentType);
        // ...while unrequested ones are omitted by the server.
        Assert.Null(item.ContentEncoding);
        Assert.Null(item.ContentDisposition);
    }
}
/// <summary>
/// Prefixes and delimiters can be used to emulate directory listings.
/// Prefixes can be used to filter objects starting with prefix.
/// The delimiter argument can be used to restrict the results to only the
/// objects in the given "directory". Without the delimiter, the entire tree
/// under the prefix is returned.
/// For example, given these objects:
///   a/1.txt
///   a/b/2.txt
///
/// If you just specify prefix="a/", you'll get back:
///   a/1.txt
///   a/b/2.txt
///
/// However, if you specify prefix="a/" and delimiter="/", you'll get back:
///   a/1.txt
/// </summary>
/// <param name="bucketName">The bucket to list the objects from.</param>
/// <param name="prefix">Only objects whose names start with this string are returned;
/// null or empty disables the filter.</param>
/// <param name="delimiter">Used to list in "directory mode"; objects whose remaining
/// name contains the delimiter are excluded.</param>
public IEnumerable <Google.Apis.Storage.v1.Data.Object> ListFilesWithPrefix(
    string bucketName = "your-unique-bucket-name",
    string prefix = "your-prefix",
    string delimiter = "your-delimiter")
{
    var client = StorageClient.Create();
    var listOptions = new ListObjectsOptions { Delimiter = delimiter };

    // NOTE(review): the returned enumerable is lazy — the caller re-issues the
    // listing request if it enumerates again after the loop below.
    var storageObjects = client.ListObjects(bucketName, prefix, listOptions);

    Console.WriteLine($"Objects in bucket {bucketName} with prefix {prefix}:");
    foreach (var storageObject in storageObjects)
    {
        Console.WriteLine(storageObject.Name);
    }
    return storageObjects;
}
public async Task <IEnumerable <string> > ListFilesAsync(int pageSize = 100, bool pagingEnabled = true)
{
    var listOptions = new ListObjectsOptions() { PageSize = pageSize };

    // Resume from the token stored by the previous call when paging is enabled.
    if (pagingEnabled)
    {
        listOptions.PageToken = _pagingToken;
    }

    Page <Object> page = await _storageClient
        .ListObjectsAsync(_configuration.BucketName, "", listOptions)
        .ReadPageAsync(pageSize);

    // Remember where the next call should continue from.
    _pagingToken = page.NextPageToken;

    return page.Select(x => x.Name);
}
/// <summary>
/// Returns the list of restore points (generations) for a specific object,
/// newest first, or null when the object is missing or an error occurs.
/// </summary>
public ObjectVersion[] GetVersions(string filename)
{
    try
    {
        using (StorageClient _client = StorageClient.Create(GoogleCredential.FromFile(_apiKey)))
        {
            // Include non-current generations so every restore point is returned.
            ListObjectsOptions options = new ListObjectsOptions();
            options.Versions = true;

            // BUG FIX: the original listed a hard-coded "(unknown).encrypted" prefix
            // and ignored the filename argument entirely; list the requested object
            // (stored with an ".encrypted" suffix) instead.
            string fmtObject = $"{filename}.encrypted";
            var list = _client.ListObjects(_bucketName, fmtObject, options).ToList();

            // Be sure the object exists
            if (list == null || list.Count == 0)
            {
                _logger.WriteLog(ErrorCodes.GcsObjectRestore_ObjectNotFound,
                                 string.Format(ErrorResources.GcsObjectRestore_ObjectNotFound, filename),
                                 Severity.Error, VerboseLevel.User);
                return null;
            }

            // Build a list of objects found, one entry per generation, newest first.
            return list.Select(item => new ObjectVersion()
            {
                Name = item.Name,
                TimeCreated = item.TimeCreated.GetValueOrDefault(),
                StorageClass = item.StorageClass,
                Size = (long)item.Size.GetValueOrDefault(),
                VersionId = item.Generation
            })
            .OrderByDescending(a => a.TimeCreated)
            .ToArray();
        }
    }
    catch (Exception ex)
    {
        _logger.WriteLog(ErrorCodes.GcsObjectRestore_GetVersionsException,
                         ErrorResources.GcsObjectRestore_GetVersionsException + Environment.NewLine + ex.Message,
                         Severity.Error, VerboseLevel.User);
        return null;
    }
}
public void ResumeWithPageToken()
{
    string bucket = _fixture.ReadBucket;
    var client = _fixture.Client;
    var totalCount = _fixture.ReadBucketObjects.Count();

    // We want to check that when reading the remainder, we still need to paginate.
    // (If we had a bug that always used the original page token, we'd end up in an infinite loop.)
    Assert.True(totalCount > 4, "Must have more than 4 objects for pagination test");

    // Read the first two objects, capturing the continuation token.
    var firstPage = client.ListObjects(bucket).ReadPage(2);
    Assert.NotNull(firstPage.NextPageToken);

    // Resume from that token: everything except the first two should come back.
    var resumeOptions = new ListObjectsOptions { PageSize = 2, PageToken = firstPage.NextPageToken };
    var rest = client.ListObjects(bucket, prefix: null, resumeOptions).ToList();
    Assert.Equal(totalCount - 2, rest.Count);
}
/// <summary>
/// Lists the objects in <paramref name="bucket"/> via the native uplink library.
/// </summary>
/// <param name="bucket">Bucket whose objects are listed.</param>
/// <param name="listObjectsOptions">Managed listing options, converted via ToSWIG().</param>
/// <returns>An <c>ObjectList</c> with one managed item per native iterator entry.</returns>
/// <exception cref="BucketListException">Thrown when the native iterator reports an error message.</exception>
public async Task <ObjectList> ListObjectsAsync(Bucket bucket, ListObjectsOptions listObjectsOptions)
{
    SWIG.ObjectIterator objectIterator = await Task.Run(() => SWIG.storj_uplink.list_objects(_access._project, bucket.Name, listObjectsOptions.ToSWIG()));
    try
    {
        SWIG.Error error = SWIG.storj_uplink.object_iterator_err(objectIterator);
        try
        {
            if (error != null && !string.IsNullOrEmpty(error.message))
            {
                throw new BucketListException(error.message);
            }
        }
        finally
        {
            // BUG FIX: the original only freed the error (and the iterator) on the
            // success path, leaking both native objects whenever the exception was
            // thrown. Free them unconditionally via try/finally.
            SWIG.storj_uplink.free_error(error);
        }

        ObjectList objectList = new ObjectList();
        while (SWIG.storj_uplink.object_iterator_next(objectIterator))
        {
            objectList.Items.Add(uplink.NET.Models.Object.FromSWIG(SWIG.storj_uplink.object_iterator_item(objectIterator), true));
        }
        return (objectList);
    }
    finally
    {
        SWIG.storj_uplink.free_object_iterator(objectIterator);
    }
}
/// <summary>Gets all top-level XML elements in the repository.</summary>
/// <remarks>All top-level elements in the repository.</remarks>
public IReadOnlyCollection <XElement> GetAllElements()
{
    // Ask for the largest page the server will allow, to minimise round trips.
    var listOptions = new ListObjectsOptions() { PageSize = Int32.MaxValue };
    var elements = new List <XElement>();

    foreach (Object item in _client.ListObjects(_bucketName, null, listOptions))
    {
        // Download each object into memory and parse it as XML.
        using (var buffer = new MemoryStream())
        {
            _client.DownloadObject(item, buffer);
            buffer.Position = 0;
            elements.Add(XElement.Load(buffer));
        }
    }

    return elements.AsReadOnly();
}
/// <summary>
/// Downloads Bing image-search results for <paramref name="imageKeyword"/> as
/// tileSize x tileSize tiles into a temp cache directory, zips the directory,
/// and uploads the zip to <paramref name="outputBucket"/>. Skips all work when
/// the zip already exists locally or in the bucket.
/// </summary>
public static async Task DownloadBingImagesAsync(
    string imageKeyword, string outputBucket, string directoryHash, int tileSize,
    ILogger logger)
{
    // NOTE(review): a new HttpClient per call risks socket exhaustion; prefer a
    // shared instance or IHttpClientFactory if this runs frequently.
    var httpClient = new HttpClient();
    var storage = StorageClient.Create();

    var zipFilename = $"{directoryHash}.zip";
    var cacheDir = Path.Combine(Path.GetTempPath(), "MosaicCache", directoryHash);
    var zipPath = Path.Combine(Path.GetTempPath(), "MosaicCache", zipFilename);
    Directory.CreateDirectory(cacheDir);

    // Skip everything when the zip already exists locally or in the bucket.
    // (FIX: the original built a ListObjectsOptions it never passed — removed —
    // and used Count() > 0, which enumerates the full listing; Any() stops at
    // the first hit.)
    var existing = storage.ListObjects(outputBucket, zipFilename, null);
    if (File.Exists(zipPath) || existing.Any())
    {
        logger.LogInformation($"Zipfile already exists, skipping Bing image download");
        return;
    }

    var imageUrls = await DownloadImages.GetImageResultsAsync(imageKeyword, logger);
    foreach (var url in imageUrls)
    {
        try
        {
            // Ask Bing for a tile-sized crop of the image.
            var resizedUrl = $"{url}&w={tileSize}&h={tileSize}&c=7";
            var queryString = HttpUtility.ParseQueryString(new Uri(url).Query);
            var imageId = queryString["id"] + ".jpg";
            var filePath = Path.Combine(cacheDir, imageId);
            using (var responseStream = await httpClient.GetStreamAsync(resizedUrl))
            {
                logger.LogInformation($"Downloading blob: {filePath}");
                using (var outputFileStream = File.Create(filePath))
                {
                    // FIX: async copy instead of blocking CopyTo inside an async method.
                    await responseStream.CopyToAsync(outputFileStream);
                }
            }
        }
        catch (Exception e)
        {
            // Best effort: one failed tile shouldn't abort the whole mosaic.
            logger.LogInformation($"Exception downloading blob: {e.Message}");
            continue;
        }
    }

    // Bundle the cache directory and upload the archive.
    ZipFile.CreateFromDirectory(cacheDir, zipPath);
    using (var zipStream = File.Open(zipPath, FileMode.Open))
    {
        if (zipStream != null)
        {
            storage.UploadObject(outputBucket, zipFilename, null, zipStream);
        }
        else
        {
            logger.LogError($"Zip file {zipPath} does not exist!");
        }
    }
}
// SWIG-generated helper: returns the native handle wrapped by a ListObjectsOptions
// proxy, or a null (IntPtr.Zero) handle when the proxy itself is null.
// Do not edit by hand — regenerated by SWIG.
internal static global::System.Runtime.InteropServices.HandleRef getCPtr(ListObjectsOptions obj) { return((obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr); }
// [END storage_list_files]
// [START storage_list_files_with_prefix]
private void ListObjects(string bucketName, string prefix, string delimiter)
{
    // Print the name of each object under the prefix, one per line.
    var client = StorageClient.Create();
    var listOptions = new ListObjectsOptions() { Delimiter = delimiter };
    var storageObjects = client.ListObjects(bucketName, prefix, listOptions);
    foreach (var storageObject in storageObjects)
    {
        Console.WriteLine(storageObject.Name);
    }
}
// SWIG-generated P/Invoke wrapper: calls the native list_objects and wraps the
// returned pointer in a managed ObjectIterator (non-owning: cMemoryOwn = false),
// or returns null when the native call yields a null pointer.
// Do not edit by hand — regenerated by SWIG.
public static ObjectIterator list_objects(Project p0, string p1, ListObjectsOptions p2) { global::System.IntPtr cPtr = storj_uplinkPINVOKE.list_objects(Project.getCPtr(p0), p1, ListObjectsOptions.getCPtr(p2)); ObjectIterator ret = (cPtr == global::System.IntPtr.Zero) ? null : new ObjectIterator(cPtr, false); return(ret); }