/// <summary>
/// Creates an uploader for the given Cloud Storage bucket, using a client
/// built from application-default credentials.
/// </summary>
/// <param name="bucketName">Name of the target bucket. Must not be null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="bucketName"/> is null.</exception>
public ImageUploader(string bucketName)
{
    // ROBUSTNESS: fail fast on a null bucket instead of surfacing a confusing
    // error on the first upload.
    _bucketName = bucketName ?? throw new ArgumentNullException(nameof(bucketName));
    // [START storageclient]
    _storageClient = StorageClient.Create();
    // [END storageclient]
}
/// <summary>
/// Creates an uploader for the given Cloud Storage bucket, using a client
/// built from credentials derived from the application name.
/// </summary>
/// <param name="bucketName">Name of the target bucket. Must not be null.</param>
/// <param name="applicationName">Application name used to resolve credentials.</param>
/// <exception cref="ArgumentNullException">When <paramref name="bucketName"/> is null.</exception>
public ImageUploader(string bucketName, string applicationName)
{
    _bucketName = bucketName ?? throw new ArgumentNullException(nameof(bucketName));
    // [START storageclient]
    // NOTE: constructors cannot be async, so we must block on the task here.
    // BUG FIX: GetAwaiter().GetResult() replaces .Result so that failures
    // surface as the original exception rather than an AggregateException.
    _storageClient = StorageClient
        .FromApplicationCredentials(applicationName).GetAwaiter().GetResult();
    // [END storageclient]
}
/// <summary>
/// Applies this cmdlet's Version, RetentionDays and LoggingOperations values
/// to the supplied logging service properties.
/// </summary>
/// <param name="logging">Logging service properties to update in place.</param>
/// <exception cref="ArgumentException">
/// When RetentionDays is outside 1-365 (and not the -1 "disable" sentinel), or
/// when None/All is combined with other logging operations.
/// </exception>
internal void UpdateServiceProperties(StorageClient.LoggingProperties logging)
{
    if (Version != null)
    {
        logging.Version = Version.ToString();
    }

    if (RetentionDays != null)
    {
        if (RetentionDays == -1)
        {
            // -1 is the sentinel meaning "disable the logging retention policy".
            logging.RetentionDays = null;
        }
        else if (RetentionDays < 1 || RetentionDays > 365)
        {
            throw new ArgumentException(string.Format(Resources.InvalidRetentionDay, RetentionDays));
        }
        else
        {
            logging.RetentionDays = RetentionDays;
        }
    }

    if (LoggingOperations != null && LoggingOperations.Length > 0)
    {
        StorageClient.LoggingOperations combined = default(StorageClient.LoggingOperations);

        foreach (var operation in LoggingOperations)
        {
            // "None" and "All" are only meaningful on their own; reject any
            // combination with other values.
            if ((operation == StorageClient.LoggingOperations.None ||
                 operation == StorageClient.LoggingOperations.All) &&
                LoggingOperations.Length > 1)
            {
                throw new ArgumentException(Resources.NoneAndAllOperationShouldBeAlone);
            }

            combined |= operation;
        }

        logging.LoggingOperations = combined;

        // Fall back to the default logging version when none was supplied.
        if (string.IsNullOrEmpty(logging.Version))
        {
            logging.Version = StorageNouns.DefaultLoggingVersion;
        }
    }
}
/// <summary>
/// Returns the asset with the given id, including its variants and the
/// metadata stored on the backing blob.
/// </summary>
/// <param name="id">Primary key of the asset.</param>
/// <returns>The asset, or 404 when no asset with that id exists.</returns>
public async Task<ActionResult<Asset>> GetAsset(int id)
{
    var storageConnectionString = _configuration["ConnectionStrings:AzureStorageConnectionString"];
    var storage = new StorageClient(storageConnectionString);

    var asset = await _context.Asset
        .Include(asset => asset.AssetVariants)
        .ThenInclude(av => av.VariantType)
        .FirstOrDefaultAsync(asset => asset.AssetId == id);

    // BUG FIX: the null check must come before the asset is used. Previously
    // asset.FileName and asset.Metadata were accessed first, so an unknown id
    // threw a NullReferenceException instead of returning 404.
    if (asset == null)
    {
        return NotFound();
    }

    var blobClient = storage.GetFile("assets", asset.FileName);
    foreach (KeyValuePair<string, string> entry in blobClient.GetProperties().Value.Metadata)
    {
        asset.Metadata.Add(new AssetMetadata(entry.Key, entry.Value));
    }

    return asset;
}
/// <summary>
/// Deletes the page element with the given id, removing its backing image
/// from Google Cloud Storage first when one exists.
/// </summary>
/// <param name="id">Primary key of the page element; 404 when null or unknown.</param>
/// <returns>Redirect to the page edit view on success; redirect back with an error flag on failure.</returns>
public async Task<IActionResult> Delete(int?id)
{
    // Validate input before doing any work (previously a storage client was
    // created even for requests that immediately 404).
    if (id == null)
    {
        return(NotFound());
    }

    var pageElToDelete = await _context.PageElements
        .SingleOrDefaultAsync(p => p.ID == id);
    if (pageElToDelete == null)
    {
        return(NotFound());
    }

    try
    {
        // Only image elements have a copy in Google Cloud Storage to clean up,
        // so the client is created lazily inside the branch.
        if (pageElToDelete.ImagePath != null)
        {
            StorageClient storageClient = StorageClient.Create();
            string bucketName = "garrettcmstest";
            await storageClient.DeleteObjectAsync(bucketName, pageElToDelete.FileName);
        }
        _context.PageElements.Remove(pageElToDelete);
        await _context.SaveChangesAsync();
        return(RedirectToAction("Edit", "Page", new { id = pageElToDelete.PageID }));
    }
    catch (DbUpdateException /* ex */)
    {
        //Log the error (uncomment ex variable name and write a log.)
        return(RedirectToAction("Delete", new { id = id, saveChangesError = true }));
    }
}
/// <summary>
/// Deletes the object referenced by the image entity's link from the
/// "eventsimages1" Cloud Storage bucket.
/// </summary>
/// <param name="imageEntity">Entity whose link is the object name; a null entity is a no-op.</param>
/// <returns>True when an object was deleted; false when imageEntity was null.</returns>
public async Task<bool> deleteEventImageFromCloud(ImageEntity imageEntity)
{
    // BUG FIX: the old catch block did "throw ex;", which resets the stack
    // trace. The catch was a pure rethrow, so it has been removed entirely;
    // exceptions now propagate with their original trace.
    bool imageDeleted = false;
    if (imageEntity != null)
    {
        string bucketName = "eventsimages1";
        string googleCloudEnvVar = SetEnvironmentVariable.getGoogleCloudEnvironmentVariable();
        GoogleCredential credential = GoogleCredential.FromFile(googleCloudEnvVar);
        var storageClient = StorageClient.Create(credential);
        await storageClient.DeleteObjectAsync(bucketName, imageEntity.link);
        // BUG FIX: the old message claimed an upload; this method deletes.
        Console.WriteLine("deleted the file successfully");
        imageDeleted = true;
    }
    return imageDeleted;
}
/// <summary>
/// Demonstrates authenticating with an explicit service-account key file:
/// creates (or reuses) the "&lt;projectId&gt;-capturetest" bucket, then lists
/// every bucket in the project.
/// </summary>
/// <param name="projectId">GCP project to operate on.</param>
/// <returns>Always null.</returns>
public static object AuthImplicit(string projectId)
{
    var credential = GoogleCredential.FromFile(serviceAccountFile);
    var storage = StorageClient.Create(credential);

    string bucketName = $"{projectId}-capturetest";
    try
    {
        storage.CreateBucket(projectId, bucketName);
    }
    catch (Google.GoogleApiException e) when (e.Error.Code == 409)
    {
        // 409 Conflict means the bucket is already there, which is fine.
        ErrorHandler.AddMessage(bucketName + " exists.");
    }

    foreach (var bucket in storage.ListBuckets(projectId))
    {
        ErrorHandler.AddMessage(bucket.Name);
    }
    return null;
}
/// <summary>
/// Wires up Rebus saga auditing so that saga snapshots are stored in a
/// Google Cloud Storage bucket, using an in-memory transport and in-memory
/// saga storage for the rest of the pipeline.
/// </summary>
public void ConfigureSagaSnapshots()
{
    // Use a single instance of the storage client
    var storageClient = StorageClient.Create();

    var activator = new BuiltinHandlerActivator();

    Using(activator);

    Configure.With(activator)
        .Transport(t => t.UseInMemoryTransport(new InMemNetwork(), "api"))
        .Sagas(s => s.StoreInMemory())
        .Options(o =>
        {
            var options = new GoogleCloudStorageSagaSnapshotOptions("my-project-id", "my-bucket")
            {
                // Skip updating the last-read time on reads.
                DoNotUpdateLastReadTime = true,
                // The bucket is expected to exist already.
                AutoCreateBucket = false,
                // Keep snapshots under a folder-style prefix inside the bucket.
                ObjectKeyPrefix = "my-snapshots-folder/",
            };

            o.EnableSagaAuditing().StoreInGoogleCloudStorage(storageClient, options);
        })
        .Start();
}
/// <summary>
/// Wires up the Rebus data bus to store attachment payloads in a Google
/// Cloud Storage bucket, with an in-memory transport for the rest of the
/// pipeline.
/// </summary>
public void ConfigureDataBus()
{
    // Use a single instance of the storage client
    var storageClient = StorageClient.Create();

    var activator = new BuiltinHandlerActivator();

    Using(activator);

    Configure.With(activator)
        .Transport(t => t.UseInMemoryTransport(new InMemNetwork(), "api"))
        .DataBus(d =>
        {
            var options = new GoogleCloudStorageDataBusOptions("my-project-id", "my-bucket")
            {
                // Skip updating the last-read time on reads.
                DoNotUpdateLastReadTime = true,
                // The bucket is expected to exist already.
                AutoCreateBucket = false,
                // Object names are wrapped with this prefix/suffix pair.
                ObjectKeyPrefix = "my-prefix",
                ObjectKeySuffix = ".my-suffix",
            };

            d.StoreInGoogleCloudStorage(storageClient, options);
        })
        .Start();
}
/// <summary>
/// Deletes the named objects from the configured bucket, authenticating with
/// a credentials file fetched over the web.
/// </summary>
/// <param name="objectNames">Names of the objects to delete.</param>
/// <returns>True when every delete succeeded; false on any failure (best-effort contract).</returns>
public bool DeleteObjects(IEnumerable <string> objectNames)
{
    try
    {
        string bucketName = WebConfiguration.bucketName;

        GoogleCredential googleCredential;
        var req = WebRequest.Create(WebConfiguration.googlecredentialsfile);
        // BUG FIX: the WebResponse itself was never disposed (only its
        // stream), leaking the underlying connection. Dispose both.
        using (var response = req.GetResponse())
        using (Stream m = response.GetResponseStream())
        {
            googleCredential = GoogleCredential.FromStream(m);
        }

        var storage = StorageClient.Create(googleCredential);
        foreach (string objectName in objectNames)
        {
            storage.DeleteObject(bucketName, objectName);
        }
        return true;
    }
    catch (Exception)
    {
        // Deliberate best-effort behavior: any failure reports false.
        // (Removed the unused exception local.)
        return false;
    }
}
/// <summary>
/// Configures the data protection system to persist keys in Google Cloud Storage.
/// </summary>
/// <remarks>
/// <para>
/// If <paramref name="client"/> is null, the client is constructed as follows:
/// <list type="bullet">
/// <item><description>If a <c>StorageClient</c> is configured via dependency injection, that is used.</description></item>
/// <item><description>If a <c>GoogleCredential</c> is configured via dependency injection, that is used to construct a <c>StorageClient</c>.</description></item>
/// <item><description>Otherwise, the default credentials are used to construct a <c>StorageClient</c>.</description></item>
/// </list>
/// </para>
/// </remarks>
/// <param name="builder">The data protection builder to configure. Must not be null.</param>
/// <param name="bucketName">The name of the bucket in which to store the object containing the keys. Must not be null.</param>
/// <param name="objectName">The name of the object in which to store the keys. Must not be null.</param>
/// <param name="client">The Google Cloud Storage client to use for network requests. May be null, in which case the client will
/// be fetched from dependency injection or created with the default credentials.</param>
/// <returns>The same builder, for chaining purposes.</returns>
public static IDataProtectionBuilder PersistKeysToGoogleCloudStorage(
    this IDataProtectionBuilder builder, string bucketName, string objectName, StorageClient client)
{
    GaxPreconditions.CheckNotNull(builder, nameof(builder));
    GaxPreconditions.CheckNotNull(bucketName, nameof(bucketName));
    GaxPreconditions.CheckNotNull(objectName, nameof(objectName));

    builder.Services.AddSingleton<IConfigureOptions<KeyManagementOptions>>(services =>
    {
        // Resolution order: explicit argument, then DI-registered client,
        // then a client built from a DI-registered credential (a null
        // credential makes StorageClient.Create fall back to the default
        // credentials automatically).
        var resolvedClient = client
            ?? services.GetService<StorageClient>()
            ?? StorageClient.Create(services.GetService<GoogleCredential>());

        return new ConfigureOptions<KeyManagementOptions>(options =>
            options.XmlRepository = new CloudStorageXmlRepository(resolvedClient, bucketName, objectName));
    });
    return builder;
}
/// <summary>
/// Loads the latest key document from blob storage and returns its first-level
/// child elements.
/// </summary>
/// <param name="storageClient">Client used to fetch the stored document.</param>
/// <returns>The child elements of the document root; empty when nothing is stored.</returns>
private async Task <IList <XElement> > GetAllElementsAsync(StorageClient storageClient)
{
    var data = await GetLatestDataAsync(storageClient);
    if (data == null)
    {
        // Nothing stored in the blob yet.
        return new XElement[0];
    }

    // The stored document has the shape:
    //   <root><child/><child/>...</root>
    // and callers want those first-level <child> elements.
    var document = CreateDocumentFromBlob(data.BlobContents);
    return document.Root.Elements().ToList();
}
/// <summary>
/// Composes the four input images into an 800x800 2x2 collage and uploads it
/// to the given bucket as "collage.png".
/// </summary>
/// <param name="memoryStreams">Exactly four image streams (one per quadrant).</param>
/// <param name="bucket">Destination bucket name.</param>
/// <param name="logger">Logger for progress messages.</param>
private async Task CreateCollage(List <MemoryStream> memoryStreams, string bucket, ILogger logger)
{
    // Rewind the inputs so Image.Load reads each from the start.
    memoryStreams.ForEach(memoryStream => memoryStream.Position = 0);

    using (var img0 = Image.Load(memoryStreams[0]))
    using (var img1 = Image.Load(memoryStreams[1]))
    using (var img2 = Image.Load(memoryStreams[2]))
    using (var img3 = Image.Load(memoryStreams[3]))
    using (var outputStream = new MemoryStream())
    using (var outputImage = new Image <Rgba32>(800, 800))
    {
        // Place each source image in one 400x400 quadrant.
        outputImage.Mutate(o => o
            .DrawImage(img0, new Point(0, 0), 1f)
            .DrawImage(img1, new Point(400, 0), 1f)
            .DrawImage(img2, new Point(0, 400), 1f)
            .DrawImage(img3, new Point(400, 400), 1f)
        );
        outputImage.SaveAsPng(outputStream);
        logger.LogInformation("Created local collage picture");

        // BUG FIX: SaveAsPng leaves the stream positioned at its end, so the
        // upload previously read zero bytes. Rewind before uploading.
        outputStream.Position = 0;

        var client = await StorageClient.CreateAsync();
        await client.UploadObjectAsync(bucket, "collage.png", "image/png", outputStream);
    }
}
/// <summary>
/// Snippet test for StorageClient.CopyObject: copies a known object from the
/// fixture's bucket into a freshly created destination bucket and verifies
/// the copy's size matches the original content.
/// </summary>
public void CopyObject()
{
    var projectId = _fixture.ProjectId;
    var sourceBucket = _fixture.BucketName;
    // Create a throwaway destination bucket and register it for cleanup.
    var destinationBucket = IdGenerator.FromGuid();
    StorageClient.Create().CreateBucket(projectId, destinationBucket);
    StorageSnippetFixture.SleepAfterBucketCreateDelete();
    _fixture.RegisterBucketToDelete(destinationBucket);

    // The code between the Snippet/End snippet markers is extracted for
    // documentation — keep it self-contained and unchanged.
    // Snippet: CopyObject
    StorageClient client = StorageClient.Create();
    string sourceName = "greetings/hello.txt";
    string destinationName = "copy.txt";
    // This method actually uses the "rewrite" API operation, for added reliability
    // when copying large objects across locations, storage classes or encryption keys.
    client.CopyObject(sourceBucket, sourceName, destinationBucket, destinationName);
    // End snippet

    // Verify the copy landed with the expected byte count.
    var obj = client.GetObject(destinationBucket, destinationName);
    Assert.Equal((ulong)Encoding.UTF8.GetByteCount(_fixture.HelloWorldContent), obj.Size.Value);
}
/// <summary>
/// Shared integration-test fixture: creates a storage client, a unique bucket
/// name prefix, the buckets the tests rely on, and — when the relevant
/// environment variables are set — a second requester-pays client and bucket.
/// </summary>
public StorageFixture()
{
    Client = StorageClient.Create();
    // Unique prefix so concurrent test runs don't collide on bucket names.
    BucketPrefix = "tests-" + Guid.NewGuid().ToString().ToLowerInvariant().Replace("-", "") + "-";
    // Repeated text (~22KB) for tests that need non-trivial object content.
    LargeContent = Encoding.UTF8.GetBytes(string.Join("\n", Enumerable.Repeat("All work and no play makes Jack a dull boy.", 500)));
    CreateBucket(SingleVersionBucket, false);
    CreateBucket(MultiVersionBucket, true);
    CreateAndPopulateReadBucket();
    CreateBucket(BucketBeginningWithZ, false);
    CreateBucket(LabelsTestBucket, false);
    CreateBucket(InitiallyEmptyBucket, false);

    RequesterPaysClient = CreateRequesterPaysClient();
    if (RequesterPaysClient != null)
    {
        RequesterPaysProjectId = Environment.GetEnvironmentVariable(RequesterPaysProjectEnvironmentVariable);
        if (string.IsNullOrEmpty(RequesterPaysProjectId))
        {
            // Credentials without a project id is a misconfiguration worth
            // failing loudly on rather than skipping silently.
            throw new Exception($"{RequesterPaysCredentialsEnvironmentVariable} set, but not {RequesterPaysProjectEnvironmentVariable}");
        }
        RequesterPaysBucket = CreateRequesterPaysBucket();
    }
}
/// <summary>
/// Uploads the voice file at the given local path to the "objectsound" bucket
/// under a generated name, and returns the resulting public URL.
/// </summary>
/// <param name="userId">Owner of the recording.</param>
/// <param name="catId">Category id (6 means "use the user's own category name").</param>
/// <param name="URL">Local path of the file to upload; overwritten with the public URL.</param>
/// <param name="voicesCounter">Per-category counters used to number the objects; incremented in place.</param>
/// <returns>The public storage URL of the uploaded object.</returns>
public static string VoiceStorage(int userId, int catId, string URL, Dictionary<string, int> voicesCounter)
{
    // NOTE(review): the credentials path is hard-coded and contains a space
    // ("wordproject -29b2e0d3e0d5.json") — confirm that filename is correct.
    Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", @"C:\keys\wordproject -29b2e0d3e0d5.json");

    // Build the object name from the category plus a running counter.
    string voiceName;
    if (voicesCounter.Count > 0)
    {
        voiceName = "voice" + BLLcategory.GetCategoryById(catId).CategoryName + voicesCounter[BLLcategory.GetCategoryById(catId).CategoryName]++ + ".mp3";
    }
    else
    {
        // No counters yet: derive the number from the user's existing objects.
        List<COMimageObject> objs = new List<COMimageObject>();
        foreach (COMimage img in BLLimage.Getimages().FindAll(img => img.UserId == userId))
        {
            objs.AddRange(BLLobject.GetObjects().FindAll(obj => obj.ImageID == img.ImageID));
        }
        string add = catId == 6 ? BLLuser.GetUserById(userId).CategoryName : BLLcategory.GetCategoryById(catId).CategoryName;
        voiceName = "voice" + add + objs.Count + ".mp3";
    }

    string bucketName = "objectsound";
    var storage = StorageClient.Create();
    using (var f = File.OpenRead(URL))
    {
        try
        {
            var res = storage.UploadObject(bucketName, voiceName, null, f);
            URL = "https://storage.googleapis.com/" + bucketName + "/" + voiceName;
        }
        catch (Exception)
        {
            // BUG FIX: "throw e;" reset the stack trace; "throw;" preserves it.
            throw;
        }
    }
    return URL;
}
/// <summary>
/// Uploads a local file to Cloud Storage, encrypting the object with the
/// given Cloud KMS key instead of a Google-managed key.
/// </summary>
public void UploadFileWithKmsKey(
    string projectId = "your-project-id",
    string bucketName = "your-unique-bucket-name",
    string keyLocation = "us-west1",
    string kmsKeyRing = "kms-key-ring",
    string kmsKeyName = "key-name",
    string localPath = "my-local-path/my-file-name",
    string objectName = "my-file-name")
{
    // KMS Key identifier of an already created KMS key.
    // If you use the Google.Cloud.Kms.V1 library, you can construct these names using helper class CryptoKeyName.
    // var fullKeyName = new CryptoKeyName(projectId, keyLocation, kmsKeyRing, kmsKeyName).ToString();
    string fullKeyName =
        $"projects/{projectId}/locations/{keyLocation}/keyRings/{kmsKeyRing}/cryptoKeys/{kmsKeyName}";

    var storage = StorageClient.Create();
    using var fileStream = File.OpenRead(localPath);
    // A null content type lets the service infer it; the KMS key name routes
    // encryption through the customer-managed key.
    storage.UploadObject(bucketName, objectName, null, fileStream,
        new UploadObjectOptions { KmsKeyName = fullKeyName });
    Console.WriteLine($"Uploaded {objectName}.");
}
// [END storage_print_bucket_acl_for_user]
// [START storage_add_bucket_owner]
/// <summary>
/// Grants the OWNER role on a bucket to the given user by appending a
/// bucket ACL entry.
/// </summary>
/// <param name="bucketName">Bucket whose ACL is modified.</param>
/// <param name="userEmail">Email of the user to grant ownership to.</param>
private void AddBucketOwner(string bucketName, string userEmail)
{
    var storage = StorageClient.Create();
    // Full projection is required so the existing ACL list comes back populated.
    var bucket = storage.GetBucket(bucketName, new GetBucketOptions()
    {
        Projection = Projection.Full
    });

    if (bucket.Acl == null)
    {
        bucket.Acl = new List<BucketAccessControl>();
    }

    bucket.Acl.Add(new BucketAccessControl()
    {
        Bucket = bucketName,
        Entity = $"user-{userEmail}",
        Role = "OWNER",
    });

    storage.UpdateBucket(bucket, new UpdateBucketOptions()
    {
        // Avoid race conditions: only apply if nobody changed the bucket
        // metadata since we read it.
        IfMetagenerationMatch = bucket.Metageneration,
    });
}
/// <summary>
/// Creates the "&lt;projectId&gt;-test-bucket" bucket, tolerating the case
/// where it already exists.
/// </summary>
static void Main(string[] args)
{
    // Your Google Cloud Platform project ID.
    string projectId = "webservicesjustsolution";

    // Instantiates a client.
    StorageClient storageClient = StorageClient.Create();

    // The name for the new bucket.
    string bucketName = $"{projectId}-test-bucket";

    try
    {
        storageClient.CreateBucket(projectId, bucketName);
        Console.WriteLine($"Bucket {bucketName} created.");
    }
    catch (Google.GoogleApiException e) when (e.Error.Code == 409)
    {
        // 409 Conflict: the bucket already exists. That's fine.
        Console.WriteLine(e.Error.Message);
    }
}
/// <summary>
/// Uploads the re-encoded file to Google Cloud Storage via a resumable upload
/// session, raising OnGSUploading with progress and OnGSUploaded on success.
/// </summary>
/// <exception cref="GSUploadException">Wraps any failure during the upload.</exception>
private async Task GSUpload()
{
    OnGSUploading(EventArgs.Empty);
    try
    {
        var client = StorageClient.Create();
        var objectName = System.IO.Path.GetFileName(ReencodedPath);
        var contentType = "text/plain";
        var options = new UploadObjectOptions();

        // Create a temporary uploader so the upload session can be manually initiated without actually uploading.
        var tempUploader = client.CreateObjectUploader(Bucket, objectName, contentType, new MemoryStream(), options);
        var uploadUri = await tempUploader.InitiateSessionAsync();

        // Send uploadUri to (unauthenticated) client application, so it can perform the upload:
        using (var stream = File.OpenRead(ReencodedPath))
        {
            IProgress<IUploadProgress> progress = new Progress<IUploadProgress>(
                p =>
                {
                    // Report percentage complete based on bytes sent.
                    GSUploadProgress = (float)p.BytesSent / stream.Length * 100;
                    OnGSUploading(EventArgs.Empty);
                });

            var actualUploader = ResumableUpload.CreateFromUploadUri(uploadUri, stream);
            actualUploader.ProgressChanged += progress.Report;
            actualUploader.ChunkSize = ResumableUpload.MinimumChunkSize * 4;
            await actualUploader.UploadAsync();
        }

        GSUri = GetGSUriFromObjectName(Bucket, objectName);
        OnGSUploaded(EventArgs.Empty);
    }
    catch (Exception e)
    {
        // BUG FIX: corrected the typo "occured" in the user-facing message.
        throw new GSUploadException("An error occurred while uploading the file.", e);
    }
}
/// <summary>
/// Demonstrates application-default credentials: prints the authenticated
/// user's email via the OAuth2 service, then lists objects in a hard-coded
/// project.
/// </summary>
private async Task Run()
{
    GoogleCredential credential = await GoogleCredential.GetApplicationDefaultAsync();
    if (credential.IsCreateScopedRequired)
    {
        // Some credential types must be scoped explicitly before use.
        credential = credential.CreateScoped(new string[] { Oauth2Service.Scope.UserinfoEmail });
    }

    var oauthService = new Oauth2Service(new BaseClientService.Initializer()
    {
        HttpClientInitializer = credential,
        ApplicationName = "Oauth2 Sample",
    });
    Console.WriteLine(oauthService.Userinfo.Get().Execute().Email);

    var storage = StorageClient.Create();
    foreach (var item in storage.ListObjects("your_project", ""))
    {
        Console.WriteLine(item.Name);
    }
}
/// <summary>
/// Validates the arguments and uploads the stream to the given bucket under
/// the given object name and content type.
/// </summary>
/// <param name="bucketName">Destination bucket. Must be non-empty.</param>
/// <param name="stream">Content to upload. Must be non-null and non-empty.</param>
/// <param name="objectName">Object name. Must be non-empty.</param>
/// <param name="contentType">MIME type of the content. Must be non-empty.</param>
/// <exception cref="ArgumentException">When any argument is missing or empty.</exception>
public async Task UploadObjectAsync(string bucketName, Stream stream, string objectName, string contentType)
{
    #region MyRegion
    // BUG FIX: ArgumentException's single-string overload treats the value as
    // the *message*, so callers previously saw just "bucketName" with no
    // context or parameter name. Use the (message, paramName) overload.
    if (string.IsNullOrEmpty(bucketName))
    {
        throw new ArgumentException("Bucket name must be provided.", nameof(bucketName));
    }
    if (stream == null || stream.Length <= 0)
    {
        throw new ArgumentException("Stream must be non-null and non-empty.", nameof(stream));
    }
    if (string.IsNullOrEmpty(objectName))
    {
        throw new ArgumentException("Object name must be provided.", nameof(objectName));
    }
    if (string.IsNullOrEmpty(contentType))
    {
        throw new ArgumentException("Content type must be provided.", nameof(contentType));
    }
    #endregion

    var storage = StorageClient.Create();
    await storage.UploadObjectAsync(bucketName, objectName, contentType, stream);
}
/// <summary>
/// App launch entry point: builds the main window with a tab bar hosting two
/// navigation stacks (movie search and top list) that share one image
/// downloader.
/// </summary>
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    this.Window = new UIWindow(UIScreen.MainScreen.Bounds);

    var storageClient = new StorageClient();
    var imageDownloader = new ImageDownloader(storageClient);

    var searchController = new MovieSearchController(new MovieService(), imageDownloader);
    var topListController = new MovieTopListController(new MovieService(), imageDownloader);

    var tabBarController = new TabBarController()
    {
        ViewControllers = new UIViewController[]
        {
            new UINavigationController(searchController),
            new UINavigationController(topListController),
        }
    };

    this.Window.RootViewController = tabBarController;
    this.Window.MakeKeyAndVisible();
    return true;
}
/// <summary>
/// Uploads content to the bucket at the given path. An already-existing
/// object with identical content counts as "already exists"; differing
/// content is reported as a conflict.
/// </summary>
/// <param name="path">Logical path, coerced into an object name.</param>
/// <param name="content">Content to store.</param>
/// <param name="contentType">MIME type of the content.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task <StoragePutResult> PutAsync(string path, Stream content, string contentType, CancellationToken cancellationToken = default)
{
    using (var storage = await StorageClient.CreateAsync())
    using (var buffered = new MemoryStream())
    {
        // Buffer the content so it can be rewound both for the upload and
        // for hashing on conflict.
        await content.CopyToAsync(buffered, 65536, cancellationToken);
        buffered.Position = 0;

        var objectName = CoercePath(path);

        try
        {
            // IfGenerationMatch = 0 makes the upload succeed only when the
            // object does not exist yet.
            await storage.UploadObjectAsync(
                _bucketName,
                objectName,
                contentType,
                buffered,
                new UploadObjectOptions { IfGenerationMatch = 0 },
                cancellationToken);

            return StoragePutResult.Success;
        }
        catch (GoogleApiException e) when (e.HttpStatusCode == HttpStatusCode.PreconditionFailed)
        {
            // The object already exists: compare its stored MD5 with the hash
            // of what we attempted to upload.
            var existingObject = await storage.GetObjectAsync(_bucketName, objectName, cancellationToken: cancellationToken);
            var existingHash = Convert.FromBase64String(existingObject.Md5Hash);

            buffered.Position = 0;
            byte[] attemptedHash;
            using (var md5 = MD5.Create())
            {
                attemptedHash = md5.ComputeHash(buffered);
            }

            return existingHash.SequenceEqual(attemptedHash)
                ? StoragePutResult.AlreadyExists
                : StoragePutResult.Conflict;
        }
    }
}
/// <summary>
/// Bot dialog handler: if the incoming message carries an attachment, uploads
/// it publicly to a GCS bucket and reports the outcome, then ends the
/// conversation either way.
/// </summary>
private async Task GetImageAsync(IDialogContext context, IAwaitable <IMessageActivity> result)
{
    try
    {
        var message = await result;
        if (message.Attachments.Count == 0)
        {
            await context.PostAsync("You should send an image file.");
        }
        else
        {
            //You don't need to pass credential when deploying to Compute Engine as it picks up the credential automatically.
            //Your code will look like this - var client = StorageClient.Create();
            var credential = GoogleCredential.FromFile(@"PATHTOKEY.json");
            var client = StorageClient.Create(credential);

            Bucket bucket = await SetUpGCP(client, null);

            var attachment = message.Attachments[0];
            using (WebClient webClient = new WebClient())
            using (Stream stream = webClient.OpenRead(attachment.ContentUrl))
            {
                // Publicly readable so the bot can hand out the URL.
                await client.UploadObjectAsync(
                    bucket.Name,
                    attachment.Name,
                    attachment.ContentType,
                    stream,
                    new UploadObjectOptions { PredefinedAcl = PredefinedObjectAcl.PublicRead });
            }
            await context.PostAsync("Image Uploaded successfully");
        }
    }
    catch (Exception ex)
    {
        // Best-effort: failures are only written to the console; the
        // conversation still ends below.
        Console.WriteLine(ex.Message);
    }
    context.EndConversation("See you soon");
}
/// <summary>
/// Writes a base64-encoded image to a temp file, uploads it to the
/// "usersimages" bucket under a generated name, and stores and returns its
/// public URL on the image record.
/// </summary>
/// <param name="image">Image record; its URL field is updated on success.</param>
/// <param name="base64">Base64-encoded image bytes.</param>
/// <returns>The public storage URL of the uploaded object.</returns>
public static string UserImageStorage(COMimage image, string base64)
{
    // Number the object after the user's existing image count.
    int counter = BLLimage.Getimages().FindAll(img => img.UserId == image.UserId).Count;
    Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", @"C:\keys\wordproject-29b2e0d3e0d5.json");

    string imageName = BLLuser.GetUserById(image.UserId).CategoryName + counter + ".jpg";
    string path = System.IO.Path.GetTempFileName();
    byte[] byte1 = Convert.FromBase64String(base64);
    try
    {
        File.WriteAllBytes(path, byte1);
    }
    catch (Exception)
    {
        // BUG FIX: "throw (e);" reset the stack trace; "throw;" preserves it.
        throw;
    }

    string bucketName = "usersimages";
    var storage = StorageClient.Create();
    using (var f = File.OpenRead(path))
    {
        try
        {
            var res = storage.UploadObject(bucketName, imageName, null, f);
            image.URL = "https://storage.googleapis.com/" + bucketName + "/" + imageName;
        }
        catch (Exception)
        {
            throw;
        }
    }
    return image.URL;
}
/// <summary>
/// Snippet test for StorageClient.UpdateObject: uploads an object with
/// metadata, mutates metadata and content type locally, pushes the update,
/// and verifies the stored object reflects every change.
/// </summary>
public void UpdateObject()
{
    var bucketName = _fixture.BucketName;

    // The code between the Snippet/End snippet markers is extracted for
    // documentation — keep it self-contained and unchanged.
    // Snippet: UpdateObject
    var client = StorageClient.Create();
    var name = "update-example.txt";
    var content = Encoding.UTF8.GetBytes("hello, world");
    var obj = new Apis.Storage.v1.Data.Object
    {
        Bucket = bucketName,
        Name = name,
        ContentType = "text/json",
        Metadata = new Dictionary <string, string>
        {
            { "key1", "value1" },
            { "key2", "value2" }
        }
    };
    obj = client.UploadObject(obj, new MemoryStream(content));
    // Local mutations: drop one key, change another, add a third, and switch
    // the content type.
    obj.Metadata.Remove("key1");
    obj.Metadata["key2"] = "updated-value2";
    obj.Metadata["key3"] = "value3";
    obj.ContentType = "text/plain";
    client.UpdateObject(obj);
    // End snippet

    // Re-fetch and confirm every change round-tripped.
    var fetchedObject = client.GetObject(bucketName, name);
    Assert.Equal(name, fetchedObject.Name);
    Assert.False(fetchedObject.Metadata.ContainsKey("key1"));
    Assert.Equal("text/plain", fetchedObject.ContentType);
    Assert.Equal("updated-value2", fetchedObject.Metadata["key2"]);
    Assert.Equal("value3", fetchedObject.Metadata["key3"]);
}
// [START vision_text_detection_pdf_gcs]
/// <summary>
/// Runs asynchronous document text detection on a PDF stored in GCS, waits
/// for the operation to finish, lists the JSON output files, and prints the
/// full text of the first page from the first output file.
/// </summary>
/// <param name="gcsSourceUri">gs:// URI of the source PDF.</param>
/// <param name="gcsDestinationBucketName">Bucket receiving the JSON output.</param>
/// <param name="gcsDestinationPrefixName">Object prefix for the output files.</param>
/// <returns>Always 0.</returns>
private static object DetectDocument(string gcsSourceUri, string gcsDestinationBucketName, string gcsDestinationPrefixName)
{
    var client = ImageAnnotatorClient.Create();

    var asyncRequest = new AsyncAnnotateFileRequest
    {
        InputConfig = new InputConfig
        {
            GcsSource = new GcsSource { Uri = gcsSourceUri },
            // Supported mime_types are: 'application/pdf' and 'image/tiff'
            MimeType = "application/pdf"
        },
        OutputConfig = new OutputConfig
        {
            // How many pages should be grouped into each json output file.
            BatchSize = 2,
            GcsDestination = new GcsDestination { Uri = $"gs://{gcsDestinationBucketName}/{gcsDestinationPrefixName}" }
        }
    };
    asyncRequest.Features.Add(new Feature { Type = Feature.Types.Type.DocumentTextDetection });

    List<AsyncAnnotateFileRequest> requests = new List<AsyncAnnotateFileRequest>();
    requests.Add(asyncRequest);
    var operation = client.AsyncBatchAnnotateFiles(requests);
    Console.WriteLine("Waiting for the operation to finish");
    operation.PollUntilCompleted();

    // Once the request has completed and the output has been
    // written to GCS, we can list all the output files.
    var storageClient = StorageClient.Create();
    // List objects with the given prefix.
    var blobList = storageClient.ListObjects(gcsDestinationBucketName, gcsDestinationPrefixName);
    Console.WriteLine("Output files:");
    foreach (var blob in blobList)
    {
        Console.WriteLine(blob.Name);
    }

    // Process the first output file from GCS.
    // IDIOM FIX: First(predicate) instead of Where(...).First().
    var output = blobList.First(x => x.Name.Contains(".json"));
    var jsonString = "";
    using (var stream = new MemoryStream())
    {
        storageClient.DownloadObject(output, stream);
        jsonString = System.Text.Encoding.UTF8.GetString(stream.ToArray());
    }

    var response = JsonParser.Default
        .Parse<AnnotateFileResponse>(jsonString);

    // The actual response for the first page of the input file.
    var firstPageResponses = response.Responses[0];
    var annotation = firstPageResponses.FullTextAnnotation;

    // Here we print the full text from the first page.
    // The response contains more information:
    // annotation/pages/blocks/paragraphs/words/symbols
    // including confidence scores and bounding boxes
    Console.WriteLine($"Full text: \n {annotation.Text}");
    return 0;
}
/// <summary>
/// Creates the wrapper with a Cloud Storage client built from
/// application-default credentials.
/// </summary>
public GoogleStorage()
{
    this._wrappedClient = StorageClient.Create();
}
/// <summary>
/// Test fixture: creates a storage client and a uniquely named
/// "tests-*-data-protection" bucket for the data-protection tests.
/// </summary>
public StorageFixture()
{
    // Timestamp-based name keeps concurrent runs from colliding.
    Bucket = IdGenerator.FromDateTime(prefix: "tests-", suffix: "-data-protection");
    Client = StorageClient.Create();
    Client.CreateBucket(ProjectId, Bucket);
}
/// <summary>
/// Verifies that a client can be constructed from the named "azure"
/// connection configuration without throwing.
/// </summary>
/// <remarks>
/// NOTE(review): the method name has a typo ("Sccessful"); it is kept as-is
/// because renaming would break any test filters referencing it.
/// </remarks>
public void LoadNamedConnectionFromConfiguration_Sccessful()
{
    // Construction succeeding IS the assertion; discard the instance instead
    // of binding it to an unused local.
    _ = new StorageClient("azure");
}
/// <summary>
/// Console walkthrough of the storage client: stores an object, creates a
/// namespace, reads both back (the object fully and with a byte range),
/// updates the object and finally deletes everything, pausing for a key
/// press between steps.
/// </summary>
static void Main()
{
    //var computeClient = new ComputeClient("5cd104e23fc947668a6c74fe63fd77e7/godbold_1310683369246", "FGJeXUzxCz5poHoSzRzmMTceuek=");
    //var machineResponse = computeClient.VirtualMachine.Deploy(new DeployVirtualMachineRequest
    //    {
    //        DisplayName = "Steves Machine",
    //        Name = "Steve Test",
    //        TemplateId = "Dummy",
    //        ZoneId = "Dummy",
    //        ServiceOfferingId = "Dummy"
    //    });
    //computeClient.VirtualMachine.Start(new StartVirtualMachineRequest { MachineId = machineResponse.Id });

    var demoContent = File.ReadAllBytes(Path.Combine(Environment.CurrentDirectory, "DemoFile.txt"));

    // SECURITY NOTE(review): the access id and shared secret are hard-coded
    // here; they should be moved to configuration and rotated.
    var storageClient = new StorageClient("5cd104e23fc947668a6c74fe63fd77e7/godbold_1310683369246", "FGJeXUzxCz5poHoSzRzmMTceuek=");

    // Store a listable object with ACLs and metadata.
    var storedObjectResponse = storageClient.StoredObject.CreateObject(new CreateObjectRequest
    {
        Content = demoContent,
        Resource = new Uri("objects", UriKind.Relative),
        GroupACL = "other=NONE",
        ACL = "godbold=FULL_CONTROL",
        Metadata = "part1=buy",
        ListableMetadata = "part4/part7/part8=quick"
    });
    Console.WriteLine("Object stored at {0}", storedObjectResponse.Location);
    Console.ReadKey();

    // Create a namespace (path-style resource with no content).
    var namespaceCreateResponse = storageClient.StoredObject.CreateObject(new CreateObjectRequest
    {
        Resource = new Uri("namespace/test/profiles of stuff/", UriKind.Relative),
        GroupACL = "other=NONE",
        ACL = "godbold=FULL_CONTROL",
        Metadata = "part1=buy",
        ListableMetadata = "part4/part7/part8=quick"
    });
    Console.WriteLine("Namespace created at {0}", namespaceCreateResponse.Location);
    Console.ReadKey();

    // Read the namespace back and dump its content as ASCII text.
    var getNamespaceResponse = storageClient.StoredObject.GetObject(new GetObjectRequest
    {
        Resource = new Uri(namespaceCreateResponse.Location, UriKind.Relative),
    });
    var namespaceContent = Encoding.ASCII.GetChars(getNamespaceResponse.Content, 0, getNamespaceResponse.Content.Length);
    var namespaceContentString = new string(namespaceContent);
    Console.WriteLine("Namespace {0} retrieved", namespaceCreateResponse.Location);
    Console.WriteLine("Content: {0}", namespaceContentString);
    Console.ReadKey();

    // Delete the namespace again.
    Console.WriteLine("Deleting namespace at {0}", namespaceCreateResponse.Location);
    storageClient.StoredObject.DeleteObject(new DeleteObjectRequest { Resource = new Uri(namespaceCreateResponse.Location, UriKind.Relative) });
    Console.WriteLine("Namespace at {0} deleted", namespaceCreateResponse.Location);
    Console.ReadKey();

    // Read the stored object in full.
    var getFullObjectResponse = storageClient.StoredObject.GetObject(new GetObjectRequest
    {
        Resource = new Uri(storedObjectResponse.Location, UriKind.Relative),
    });
    var allContent = Encoding.ASCII.GetChars(getFullObjectResponse.Content, 0, getFullObjectResponse.Content.Length);
    var allContentString = new string(allContent);
    Console.WriteLine("Object {0} retrieved", storedObjectResponse.Location);
    Console.WriteLine("Content: {0}", allContentString);
    Console.ReadKey();

    // Read the same object again, skipping the first bytes via a range request.
    var getObjectResponse = storageClient.StoredObject.GetObject(new GetObjectRequest
    {
        Resource = new Uri(storedObjectResponse.Location, UriKind.Relative),
        LowerRange = 10
    });
    var content = Encoding.ASCII.GetChars(getObjectResponse.Content, 0, getObjectResponse.Content.Length);
    var contentString = new string(content);
    Console.WriteLine("Object {0} retrieved", storedObjectResponse.Location);
    Console.WriteLine("Content: {0}", contentString);
    Console.ReadKey();

    // Overwrite the object's content with the original bytes.
    Console.WriteLine("Updating object stored at {0}", storedObjectResponse.Location);
    storageClient.StoredObject.UpdateObject(new UpdateObjectRequest { Resource = new Uri(storedObjectResponse.Location, UriKind.Relative), Content = demoContent });
    Console.WriteLine("Object at {0} was updated", storedObjectResponse.Location);
    Console.ReadKey();

    // Clean up the stored object.
    Console.WriteLine("Deleting object stored at {0}", storedObjectResponse.Location);
    storageClient.StoredObject.DeleteObject(new DeleteObjectRequest { Resource = new Uri(storedObjectResponse.Location, UriKind.Relative) });
    Console.WriteLine("Object at {0} deleted", storedObjectResponse.Location);
    Console.ReadKey();
}
/// <summary>
/// Private constructor: initializes the GCS client via the project's Auth
/// helper. The private access restricts construction to this class.
/// </summary>
private GcsService()
{
    this.storage = Auth.NewStorageClient();
}
/// <summary>
/// Fetches a file from the master into the local work directory in
/// MaxCachedSize chunks, then registers it via PreRead.
/// </summary>
/// <param name="path">Relative file path inside the work directory.</param>
/// <param name="check">When true, verifies the local length and hash against the master.</param>
/// <returns>The registered file node, or null when there is no master connection.</returns>
/// <exception cref="Exception">When <paramref name="check"/> is set and the file fails verification.</exception>
private FileNode Download(string path, bool check = false)
{
    if (_masterCon == null)
    {
        return null;
    }

    var localPath = Path.Combine(WorkDir, path);
    //Debug.Assert(!File.Exists(localPath));
    var client = new StorageClient(_taskName, _masterCon);

    using (var target = File.Create(localPath, FileNode.MaxCachedSize))
    {
        // Pull chunks until a short read signals end-of-file.
        long offset = 0;
        byte[] chunk;
        do
        {
            chunk = client.ReadData(path, offset, FileNode.MaxCachedSize);
            target.Write(chunk, 0, chunk.Length);
            offset += chunk.Length;
        } while (chunk.Length == FileNode.MaxCachedSize);
        target.Flush();
    }

    var node = PreRead(path);
    if (check)
    {
        var expectedLength = client.GetSize(path);
        var expectedHash = client.GetHash(path);
        Debug.Assert(expectedLength.HasValue && expectedHash.HasValue);
        if (node.Length != expectedLength.Value)
            throw new Exception("File '" + path + "' corrupted: length");
        if (node.Hash != expectedHash.Value)
            throw new Exception("File '" + path + "' corrupted: hash wrong");
    }
    return node;
}
/// <summary>
/// Creates the manager with a Cloud Storage client built from
/// application-default credentials.
/// </summary>
public CloudManager()
{
    _storageClient = StorageClient.Create();
}
/// <summary>
/// Checks whether a local copy of <paramref name="path"/> exists and — when a
/// master connection is available — whether its length and Murmur2 hash match
/// the master's. A consistent copy is recorded in the file-set tree and its
/// node returned; a confirmed-stale copy is deleted and null is returned.
/// </summary>
/// <param name="path">Relative file path under the working directory.</param>
/// <returns>The registered <c>FileNode</c>, or null when absent or inconsistent.</returns>
private FileNode PreRead(string path)
{
    var fullpath = Path.Combine(WorkDir, path);
    var fi = new FileInfo(fullpath);
    if (fi.Exists)
    {
        var size = fi.Length;
        ulong hash;
        if (_masterCon != null)
        {
            var gw = new StorageClient(_taskName, _masterCon);
            // Cheap size check first; only hash when the lengths agree.
            var gwSize = gw.GetSize(path);
            if (gwSize != size)
            {
                // Delete only when the master definitely reported a different
                // size; a null answer (no remote info) leaves the file in place.
                if (gwSize.HasValue) fi.Delete();
                return null;
            }
            hash = HashLib.HashFactory.Hash64.CreateMurmur2().ComputeFile(fullpath).GetULong();
            var gwHash = gw.GetHash(path);
            if (gwHash != hash)
            {
                // Same policy as above: only a definite hash mismatch deletes.
                if (gwHash.HasValue) fi.Delete();
                return null;
            }
        }
        else
            // No master to compare against: trust the local copy and hash it.
            hash = HashLib.HashFactory.Hash64.CreateMurmur2().ComputeFile(fullpath).GetULong();
        var n = new FileNode { Hash = hash, Length = fi.Length, ChangeDate = fi.LastWriteTime };
        _fset.Tree.Add(path, n);
        return n;
    }
    return null;
}
/// <summary>
/// Entry point of the FastDFS client smoke test: uploads buffers, files and
/// streamed content, exercises slave files, metadata get/set, downloads and
/// deletion against a tracker/storage cluster, logging each step.
/// </summary>
/// <param name="args">
/// args[0]: config filename; args[1]: local filename to upload.
/// </param>
public static void main(string[] args)
{
    if (args.Length < 2)
    {
        Console.WriteLine("Error: Must have 2 parameters, one is config filename, " + "the other is the local filename to upload");
        return;
    }

    Console.WriteLine("dotnetcore.version=" + typeof(object).GetTypeInfo().Assembly.GetName().Version.ToString());

    string conf_filename = args[0];
    string local_filename = args[1];
    try
    {
        // Load cluster configuration (tracker addresses, timeouts, charset, ...).
        ClientGlobal.init(conf_filename);
        Console.WriteLine("network_timeout=" + ClientGlobal.g_network_timeout + "ms");
        Console.WriteLine("charset=" + ClientGlobal.g_charset);

        long startTime;
        string group_name;
        string remote_filename;
        ServerInfo[] servers;
        TrackerClient tracker = new TrackerClient();
        TrackerServer trackerServer = tracker.getTrackerServer();
        StorageServer storageServer = null;

        /*
         * storageServer = tracker.getStoreStorage(trackerServer);
         * if (storageServer == null)
         * {
         * Console.WriteLine("getStoreStorage fail, error code: " + tracker.getErrorCode());
         * return;
         * }
         */
        // storageServer stays null here: the client resolves a store server itself.
        StorageClient client = new StorageClient(trackerServer, storageServer);
        byte[] file_buff;
        NameValuePair[] meta_list;
        string[] results;
        string master_filename;
        string prefix_name;
        string file_ext_name;
        string generated_slave_filename;
        int errno;

        // NOTE(review): "heigth" is a typo, but it is a runtime metadata key the
        // server round-trips — fixing the spelling would change stored metadata.
        meta_list = new NameValuePair[4];
        meta_list[0] = new NameValuePair("width", "800");
        meta_list[1] = new NameValuePair("heigth", "600");
        meta_list[2] = new NameValuePair("bgcolor", "#FFFFFF");
        meta_list[3] = new NameValuePair("author", "Mike");
        file_buff = ClientGlobal.g_charset.GetBytes("this is a test");
        Console.WriteLine("file Length: " + file_buff.Length);

        // List the store-capable storage servers (group_name == null: any group).
        group_name = null;
        StorageServer[] storageServers = tracker.getStoreStorages(trackerServer, group_name);
        if (storageServers == null)
        {
            Log.Error("get store storage servers fail, error code: " + tracker.getErrorCode());
        }
        else
        {
            // NOTE(review): informational output is routed through Log.Error
            // throughout this method — presumably to force visibility; confirm
            // the intended log level.
            Log.Error("store storage servers count: " + storageServers.Length);
            for (int k = 0; k < storageServers.Length; k++)
            {
                Log.Error((k + 1) + ". " + storageServers[k].getInetSocketAddress().Address + ":" + storageServers[k].getInetSocketAddress().Port);
            }
            Log.Error("");
        }

        // --- Upload an in-memory buffer ---
        // NOTE(review): DateTime.Ticks are 100 ns units; every "(Ticks - startTime)
        // + \" ms\"" label in this method over-reports elapsed time by 10000x.
        // Consider TimeSpan.FromTicks(...).TotalMilliseconds or a Stopwatch.
        startTime = DateTime.Now.Ticks;
        results = client.upload_file(file_buff, "txt", meta_list);
        Console.WriteLine("upload_file time used: " + (DateTime.Now.Ticks - startTime) + " ms");

        /*
         * group_name = "";
         * results = client.upload_file(group_name, file_buff, "txt", meta_list);
         */
        if (results == null)
        {
            Log.Error("upload file fail, error code: " + client.getErrorCode());
            return;
        }
        else
        {
            group_name = results[0];
            remote_filename = results[1];
            Log.Error("group_name: " + group_name + ", remote_filename: " + remote_filename);
            Log.Error(client.get_file_info(group_name, remote_filename).ToString());

            // Which storage servers can serve this file back?
            servers = tracker.getFetchStorages(trackerServer, group_name, remote_filename);
            if (servers == null)
            {
                Log.Error("get storage servers fail, error code: " + tracker.getErrorCode());
            }
            else
            {
                Log.Error("storage servers count: " + servers.Length);
                for (int k = 0; k < servers.Length; k++)
                {
                    Log.Error((k + 1) + ". " + servers[k].getIpAddr() + ":" + servers[k].getPort());
                }
                Log.Error("");
            }

            // --- Merge new metadata onto the uploaded file ---
            meta_list = new NameValuePair[4];
            meta_list[0] = new NameValuePair("width", "1024");
            meta_list[1] = new NameValuePair("heigth", "768");
            meta_list[2] = new NameValuePair("bgcolor", "#000000");
            meta_list[3] = new NameValuePair("title", "Untitle");
            startTime = DateTime.Now.Ticks;
            errno = client.set_metadata(group_name, remote_filename, meta_list, (byte)ProtoCommon.STORAGE_SET_METADATA_FLAG_MERGE);
            Console.WriteLine("set_metadata time used: " + (DateTime.Now.Ticks - startTime) + " ms");
            if (errno == 0)
            {
                Log.Error("set_metadata success");
            }
            else
            {
                Log.Error("set_metadata fail, error no: " + errno);
            }

            // Read metadata back and dump it.
            meta_list = client.get_metadata(group_name, remote_filename);
            if (meta_list != null)
            {
                for (int i = 0; i < meta_list.Length; i++)
                {
                    Console.WriteLine(meta_list[i].getName() + " " + meta_list[i].getValue());
                }
            }

            //Thread.sleep(30000);

            // --- Download the buffer back and print it ---
            startTime = DateTime.Now.Ticks;
            file_buff = client.download_file(group_name, remote_filename);
            Console.WriteLine("download_file time used: " + (DateTime.Now.Ticks - startTime) + " ms");
            if (file_buff != null)
            {
                Console.WriteLine("file Length:" + file_buff.Length);
                Console.WriteLine(ClientGlobal.g_charset.GetString(file_buff));
            }

            // --- Upload a slave file attached to the master file ---
            file_buff = ClientGlobal.g_charset.GetBytes("this is a slave buff");
            master_filename = remote_filename;
            prefix_name = "-part1";
            file_ext_name = "txt";
            startTime = DateTime.Now.Ticks;
            results = client.upload_file(group_name, master_filename, prefix_name, file_buff, file_ext_name, meta_list);
            Console.WriteLine("upload_file time used: " + (DateTime.Now.Ticks - startTime) + " ms");
            if (results != null)
            {
                Log.Error("slave file group_name: " + results[0] + ", remote_filename: " + results[1]);
                // The slave filename is derivable client-side; verify the server agrees.
                generated_slave_filename = ProtoCommon.genSlaveFilename(master_filename, prefix_name, file_ext_name);
                if (generated_slave_filename != results[1])
                {
                    Log.Error("generated slave file: " + generated_slave_filename + "\n != returned slave file: " + results[1]);
                }
                Log.Error(client.get_file_info(results[0], results[1]).ToString());
            }

            // --- Delete the master file ---
            startTime = DateTime.Now.Ticks;
            errno = client.delete_file(group_name, remote_filename);
            Console.WriteLine("delete_file time used: " + (DateTime.Now.Ticks - startTime) + " ms");
            if (errno == 0)
            {
                Log.Error("Delete file success");
            }
            else
            {
                Log.Error("Delete file fail, error no: " + errno);
            }
        }

        // --- Upload a local file by path and build a (optionally tokenized) URL ---
        results = client.upload_file(local_filename, null, meta_list);
        if (results != null)
        {
            string file_id;
            int ts;
            string token;
            string file_url;
            InetSocketAddress inetSockAddr;

            group_name = results[0];
            remote_filename = results[1];
            file_id = group_name + StorageClient1.SPLIT_GROUP_NAME_AND_FILENAME_SEPERATOR + remote_filename;

            inetSockAddr = trackerServer.getInetSocketAddress();
            file_url = "http://" + inetSockAddr.Address;
            if (ClientGlobal.g_tracker_http_port != 80)
            {
                file_url += ":" + ClientGlobal.g_tracker_http_port;
            }
            file_url += "/" + file_id;
            if (ClientGlobal.g_anti_steal_token)
            {
                // NOTE(review): Ticks / 1000 is NOT a unix timestamp (ticks are
                // 100 ns since 0001-01-01). The Java original used
                // currentTimeMillis()/1000; the server-side token check will
                // reject this ts. Use DateTimeOffset.UtcNow.ToUnixTimeSeconds().
                ts = (int)(DateTime.Now.Ticks / 1000);
                token = ProtoCommon.getToken(file_id, ts, ClientGlobal.g_secret_key);
                file_url += "?token=" + token + "&ts=" + ts;
            }
            Log.Error("group_name: " + group_name + ", remote_filename: " + remote_filename);
            Log.Error(client.get_file_info(group_name, remote_filename).ToString());
            Log.Error("file url: " + file_url);

            // Download to a local path (Windows-only c:\ destination).
            errno = client.download_file(group_name, remote_filename, 0, 0, "c:\\" + remote_filename.Replace("/", "_"));
            if (errno == 0)
            {
                Log.Error("Download file success");
            }
            else
            {
                Log.Error("Download file fail, error no: " + errno);
            }

            // Download again through a callback writer.
            errno = client.download_file(group_name, remote_filename, 0, 0, new DownloadFileWriter("c:\\" + remote_filename.Replace("/", "-")));
            if (errno == 0)
            {
                Log.Error("Download file success");
            }
            else
            {
                Log.Error("Download file fail, error no: " + errno);
            }

            // --- Slave upload from a local file path ---
            master_filename = remote_filename;
            prefix_name = "-part2";
            file_ext_name = null;
            startTime = DateTime.Now.Ticks;
            results = client.upload_file(group_name, master_filename, prefix_name, local_filename, null, meta_list);
            Console.WriteLine("upload_file time used: " + (DateTime.Now.Ticks - startTime) + " ms");
            if (results != null)
            {
                Log.Error("slave file group_name: " + results[0] + ", remote_filename: " + results[1]);
                generated_slave_filename = ProtoCommon.genSlaveFilename(master_filename, prefix_name, file_ext_name);
                if (generated_slave_filename != results[1])
                {
                    Log.Error("generated slave file: " + generated_slave_filename + "\n != returned slave file: " + results[1]);
                }
                Log.Error(client.get_file_info(results[0], results[1]).ToString());
            }
        }

        // --- Streamed upload via UploadLocalFileSender ---
        System.IO.FileInfo f;
        f = new System.IO.FileInfo(local_filename);
        // Derive the extension only if it fits FastDFS's extension length limit.
        int nPos = local_filename.LastIndexOf('.');
        if (nPos > 0 && local_filename.Length - nPos <= ProtoCommon.FDFS_FILE_EXT_NAME_MAX_LEN + 1)
        {
            file_ext_name = local_filename.Substring(nPos + 1);
        }
        else
        {
            file_ext_name = null;
        }
        results = client.upload_file(null, f.Length, new UploadLocalFileSender(local_filename), file_ext_name, meta_list);
        if (results != null)
        {
            group_name = results[0];
            remote_filename = results[1];
            Console.WriteLine("group name: " + group_name + ", remote filename: " + remote_filename);
            Console.WriteLine(client.get_file_info(group_name, remote_filename));

            // Streamed slave upload against the streamed master.
            master_filename = remote_filename;
            prefix_name = "-part3";
            startTime = DateTime.Now.Ticks;
            results = client.upload_file(group_name, master_filename, prefix_name, f.Length, new UploadLocalFileSender(local_filename), file_ext_name, meta_list);
            Console.WriteLine("upload_file time used: " + (DateTime.Now.Ticks - startTime) + " ms");
            if (results != null)
            {
                Log.Error("slave file group_name: " + results[0] + ", remote_filename: " + results[1]);
                generated_slave_filename = ProtoCommon.genSlaveFilename(master_filename, prefix_name, file_ext_name);
                if (generated_slave_filename != results[1])
                {
                    Log.Error("generated slave file: " + generated_slave_filename + "\n != returned slave file: " + results[1]);
                }
                Log.Error(client.get_file_info(results[0], results[1]).ToString());
            }
        }
        else
        {
            // NOTE(review): errno here is stale — it was last set by the earlier
            // delete_file/set_metadata calls, not by this failed upload_file.
            // client.getErrorCode() is presumably what was meant; confirm.
            Log.Error("Upload file fail, error no: " + errno);
        }

        // --- Connectivity checks ---
        storageServer = tracker.getFetchStorage(trackerServer, group_name, remote_filename);
        if (storageServer == null)
        {
            Console.WriteLine("getFetchStorage fail, errno code: " + tracker.getErrorCode());
            return;
        }
        /* for test only */
        Console.WriteLine("active test to storage server: " + storageServer.getConnection().activeTest());
        /* for test only */
        Console.WriteLine("active test to tracker server: " + trackerServer.getConnection().activeTest());
    }
    catch (Exception ex)
    {
        Log.Error(ex.Message + ex.StackTrace);
    }
}
/// <summary>
/// Per-test setup: prepares a small ASCII payload and a storage client bound
/// to a fixed remote container path.
/// </summary>
// NOTE(review): the container id and what is presumably a base64 access key are
// hard-coded in source. Move them to test configuration / environment variables
// and rotate the key — secrets must not live in the repository.
public void TestSetup()
{
    _demoContent = Encoding.ASCII.GetBytes("Some text from a file");
    _storageClient = new StorageClient("5cd104e23fc947668a6c74fe63fd77e7/godbold_1310683369246", "FGJeXUzxCz5poHoSzRzmMTceuek=");
}