/// <summary>
/// Uploads a local file to Azure blob storage under a freshly generated GUID blob name.
/// </summary>
/// <param name="storageName">Storage account the file will be uploaded to.</param>
/// <param name="storageKey">Storage account access key.</param>
/// <param name="filePath">Path of the local file to upload.</param>
/// <param name="blobRequestOptions">The request options applied to the upload.</param>
/// <returns>Uri locating the uploaded blob.</returns>
/// <remarks>The uploaded blob name will be a GUID.</remarks>
public static Uri UploadFile(string storageName, string storageKey, string filePath, BlobRequestOptions blobRequestOptions)
{
    string baseAddress = General.BlobEndpointUri(storageName);
    var client = new CloudBlobClient(baseAddress, new StorageCredentialsAccountAndKey(storageName, storageKey));

    // Each upload gets a fresh GUID name so repeated uploads never collide.
    string blobName = Guid.NewGuid().ToString();
    CloudBlobContainer container = client.GetContainerReference(ContainerName);
    container.CreateIfNotExist();

    CloudBlob blob = container.GetBlobReference(blobName);
    using (FileStream source = File.OpenRead(filePath))
    {
        blob.UploadFromStream(source, blobRequestOptions);
    }

    // Compose the result URI manually: <base><container><delimiter><blobName>.
    string uriText = string.Format(
        CultureInfo.InvariantCulture,
        "{0}{1}{2}{3}",
        client.BaseUri,
        ContainerName,
        client.DefaultDelimiter,
        blobName);
    return new Uri(uriText);
}
/// <summary>
/// Uploads a local file to blob storage, naming the blob with a UTC timestamp
/// prefix followed by the original file name.
/// </summary>
/// <param name="storageName">Storage account name.</param>
/// <param name="blobEndpointUri">Blob service endpoint of the account.</param>
/// <param name="storageKey">Storage account access key.</param>
/// <param name="filePath">Path of the local file to upload.</param>
/// <param name="blobRequestOptions">Request options; when null, defaults with a 300-minute server timeout are used.</param>
/// <returns>Uri locating the uploaded blob.</returns>
public virtual Uri UploadFile(
    string storageName,
    Uri blobEndpointUri,
    string storageKey,
    string filePath,
    BlobRequestOptions blobRequestOptions)
{
    var client = new CloudBlobClient(blobEndpointUri, new StorageCredentials(storageName, storageKey));

    // Blob name: "<yyyyMMdd_HHmmss>_<original file name>" so uploads sort by time.
    string timestamp = DateTime.UtcNow.ToString("yyyyMMdd_HHmmss", CultureInfo.InvariantCulture);
    string blobName = string.Format(CultureInfo.InvariantCulture, "{0}_{1}", timestamp, Path.GetFileName(filePath));

    CloudBlobContainer container = client.GetContainerReference(ContainerName);
    container.CreateIfNotExists();
    CloudBlockBlob blob = container.GetBlockBlobReference(blobName);

    // Large uploads can be slow; guarantee a generous server timeout when the
    // caller did not set one.
    BlobRequestOptions options = blobRequestOptions ?? new BlobRequestOptions();
    if (options.ServerTimeout == null)
    {
        options.ServerTimeout = TimeSpan.FromMinutes(300);
    }

    using (FileStream source = File.OpenRead(filePath))
    {
        blob.UploadFromStream(source, AccessCondition.GenerateEmptyCondition(), options);
    }

    return new Uri(string.Format(
        CultureInfo.InvariantCulture,
        "{0}{1}{2}{3}",
        client.BaseUri,
        ContainerName,
        client.DefaultDelimiter,
        blobName));
}
/// <summary>
/// Starts the request's asynchronous response retrieval and wraps the resulting
/// task with the given optional timeout.
/// </summary>
/// <param name="req">Request to execute.</param>
/// <param name="service">Blob client passed through to the underlying async call.</param>
/// <param name="timeout">Optional timeout applied to the wrapped task.</param>
/// <returns>The timeout-wrapped response task.</returns>
internal static Task<WebResponse> GetResponseAsyncWithTimeout(this WebRequest req, CloudBlobClient service, TimeSpan? timeout)
{
    // Kick off the server round-trip, then let the helper enforce the deadline.
    return TimeoutHelper.GetTimeoutWrappedTask(timeout, req.GetResponseAsync(service));
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureServerPackageRepository"/> class,
/// reading the storage connection string from the "StorageConnectionString" setting.
/// </summary>
/// <param name="packageLocator">The package locator.</param>
/// <param name="packageSerializer">The package serializer.</param>
public AzureServerPackageRepository(IPackageLocator packageLocator, IAzurePackageSerializer packageSerializer)
{
    _packageLocator = packageLocator;
    _packageSerializer = packageSerializer;

    string connectionString = CloudConfigurationManager.GetSetting("StorageConnectionString");

    // The same connection string backs both the Microsoft.WindowsAzure.Storage
    // account (blob access) and the legacy Microsoft.WindowsAzure account helper.
    _storageAccount = Microsoft.WindowsAzure.Storage.CloudStorageAccount.Parse(connectionString);
    _blobClient = _storageAccount.CreateCloudBlobClient();
    _helper = Microsoft.WindowsAzure.CloudStorageAccount.Parse(connectionString);
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobStorage" /> class.
/// </summary>
/// <param name="account">The Azure account to use.</param>
/// <param name="containerAddress">The name of the Azure blob container to use for uploaded blobs.</param>
public AzureBlobStorage(CloudStorageAccount account, string containerAddress)
{
    // Validate eagerly: a bad container name would otherwise only surface
    // on the first service call.
    Requires.NotNull(account, "account");
    Requires.NotNullOrEmpty(containerAddress, "containerAddress");
    Requires.Argument(DesktopUtilities.IsValidBlobContainerName(containerAddress), "containerAddress", "Invalid container name.");

    this.account = account;
    this.client = account.CreateCloudBlobClient();
    this.container = this.client.GetContainerReference(containerAddress);
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureServerPackageRepository"/> class
/// against an already-constructed storage account.
/// </summary>
/// <param name="packageLocator">The package locator.</param>
/// <param name="packageSerializer">The package serializer.</param>
/// <param name="storageAccount">Storage account whose blob service backs the repository.</param>
public AzureServerPackageRepository(IPackageLocator packageLocator, IAzurePackageSerializer packageSerializer, Microsoft.WindowsAzure.Storage.CloudStorageAccount storageAccount)
{
    _storageAccount = storageAccount;
    _blobClient = storageAccount.CreateCloudBlobClient();
    _packageLocator = packageLocator;
    _packageSerializer = packageSerializer;
}
/// <summary>
/// Deletes a deployment package blob from the given storage account.
/// </summary>
/// <param name="channel">Service management channel used to look up keys and endpoints.</param>
/// <param name="storageName">Storage account holding the package.</param>
/// <param name="subscriptionId">Subscription that owns the storage account.</param>
/// <param name="packageUri">Absolute URI of the package blob.</param>
public static void DeletePackageFromBlob(IServiceManagement channel, string storageName, string subscriptionId, Uri packageUri)
{
    // The primary key authenticates the delete.
    string storageKey = channel.GetStorageKeys(subscriptionId, storageName).StorageServiceKeys.Primary;

    // Find the blob endpoint among the account's service endpoints.
    var storageService = channel.GetStorageService(subscriptionId, storageName);
    var blobStorageEndpoint = new Uri(storageService.StorageServiceProperties.Endpoints.Find(p => p.Contains(BlobEndpointIdentifier)));

    var client = new CloudBlobClient(blobStorageEndpoint, new StorageCredentials(storageName, storageKey));
    client.GetBlobReferenceFromServer(packageUri).DeleteIfExists();
}
/// <summary>
/// Deletes a deployment package blob from the given storage account.
/// </summary>
/// <param name="storageClient">Storage management client used to fetch keys and endpoints.</param>
/// <param name="storageName">Storage account holding the package.</param>
/// <param name="packageUri">Absolute URI of the package blob.</param>
public virtual void DeletePackageFromBlob(
    StorageManagementClient storageClient,
    string storageName,
    Uri packageUri)
{
    string storageKey = storageClient.StorageAccounts.GetKeys(storageName).PrimaryKey;

    // NOTE(review): Endpoints[0] is assumed to be the blob endpoint — verify
    // against the management SDK version in use.
    var storageService = storageClient.StorageAccounts.Get(storageName);
    Uri blobStorageEndpoint = storageService.StorageAccount.Properties.Endpoints[0];

    var client = new CloudBlobClient(blobStorageEndpoint, new StorageCredentials(storageName, storageKey));
    client.GetBlobReferenceFromServer(packageUri).DeleteIfExists();
}
/// <summary>
/// Deletes the VHD blob addressed by <paramref name="mediaLink"/>.
/// </summary>
/// <param name="channel">Service management channel used to fetch the account keys.</param>
/// <param name="subscriptionId">Subscription that owns the storage account.</param>
/// <param name="mediaLink">Absolute URI of the VHD blob; the first label of its host is the account name.</param>
public static void RemoveVHD(IServiceManagement channel, string subscriptionId, Uri mediaLink)
{
    // "<account>.blob...." -> the account name is the first host label.
    string accountName = mediaLink.Host.Split('.')[0];
    var blobEndpoint = new Uri(mediaLink.GetComponents(UriComponents.SchemeAndServer, UriFormat.Unescaped));

    StorageService storageService;
    using (new OperationContextScope(channel.ToContextChannel()))
    {
        storageService = channel.GetStorageKeys(subscriptionId, accountName);
    }

    var credentials = new StorageCredentials(accountName, storageService.StorageServiceKeys.Primary);
    var client = new CloudBlobClient(blobEndpoint, credentials);
    client.GetBlobReferenceFromServer(mediaLink).DeleteIfExists();
}
/// <summary>
/// Deletes blobs addressed by <paramref name="directoryUri"/>. The URI may name a
/// container, a single blob, a blob directory, or end in a wildcard pattern
/// (<c>*</c>/<c>?</c>) applied to its last path segment.
/// </summary>
/// <param name="directoryUri">Container/blob/directory URI, optionally with a wildcard in its last segment.</param>
/// <param name="storageConnectionString">Connection string used to build the storage client.</param>
/// <param name="cancelToken">Token passed to the bulk delete helper.</param>
/// <param name="fileDeletedAction">Optional callback invoked per deleted item.</param>
/// <param name="fileFailedAction">Optional callback invoked when deletion fails.</param>
/// <param name="completedAction">Optional callback invoked once at the end, regardless of outcome.</param>
public void RemoveFiles(string directoryUri, string storageConnectionString, CancellationToken cancelToken, Action<string> fileDeletedAction, Action<string, Exception> fileFailedAction, Action completedAction)
{
    storageAccount = CloudStorageAccount.Parse(storageConnectionString);
    blobClient = storageAccount.CreateCloudBlobClient();
    var directoryPath = directoryUri;
    var searchPattern = string.Empty;
    // The last path segment decides whether this is a wildcard (pattern) delete.
    var lastSegment = directoryUri.Substring(directoryUri.LastIndexOf("/") + 1);
    if (lastSegment.Contains('*') || lastSegment.Contains('?'))
    {
        // Split the URI into its directory part plus the wildcard pattern,
        // list the matching blobs, and bulk-delete them.
        directoryPath = directoryUri.Remove(directoryUri.Length - lastSegment.Length);
        searchPattern = lastSegment;
        var blobs = listing.ListBlobs(directoryPath, searchPattern, storageConnectionString, true);
        this.DeleteBlobs(blobs, cancelToken, fileDeletedAction, fileFailedAction);
    }
    else
    {
        // No wildcard: probe what the URI denotes by trying each interpretation
        // in turn (container -> single blob -> blob directory), falling through
        // to the next interpretation on a storage exception.
        try
        {
            var container = blobClient.GetContainerReference(directoryUri);
            container.Delete();
            if (fileDeletedAction != null)
            {
                fileDeletedAction(directoryUri);
            }
        }
        catch (StorageClientException)
        {
            try
            {
                // Not a container; try deleting it as a single blob.
                var blob = blobClient.GetBlobReference(directoryUri);
                blob.Delete();
                if (fileDeletedAction != null)
                {
                    fileDeletedAction(directoryUri);
                }
            }
            catch (StorageClientException)
            {
                try
                {
                    // Not a single blob either; treat it as a blob directory and
                    // delete everything listed beneath it.
                    var directory = blobClient.GetBlobDirectoryReference(directoryUri);
                    var blobs = directory.ListBlobs().Select(i => i.Uri.ToString()).ToArray();
                    if (blobs.Length > 0)
                    {
                        this.DeleteBlobs(blobs, cancelToken, fileDeletedAction, fileFailedAction);
                    }
                }
                catch (StorageClientException ex)
                {
                    // All interpretations failed; report the failure if a callback was given.
                    if (fileFailedAction != null)
                    {
                        fileFailedAction(directoryUri, ex);
                    }
                }
            }
        }
    }
    if (completedAction != null)
    {
        completedAction();
    }
}
/// <summary>
/// Builds a <see cref="CloudBlobClient"/> for the configured storage account.
/// A dotted account name is treated as a full host and addressed over http;
/// otherwise the production endpoint URI template is used.
/// </summary>
/// <returns>A blob client authenticated with the configured account name and key.</returns>
private CloudBlobClient GetStorageClient()
{
    string accountName = this.storageAccountConfiguration.Name;

    // Dotted names are complete hosts; bare names go through the template.
    string endpoint = accountName.Contains(".")
        ? string.Format(CultureInfo.InvariantCulture, "http://{0}", accountName)
        : string.Format(CultureInfo.InvariantCulture, ProductionStorageAccountEndpointUriTemplate, accountName);

    // Credentials always use only the first label of the configured name.
    var storageCredentials = new StorageCredentials(
        accountName.Split('.').First(),
        this.storageAccountConfiguration.Key);

    return new CloudBlobClient(new Uri(endpoint), storageCredentials);
}
/// <summary>
/// Resolves the container named by <c>blobContainerName</c> on this instance's
/// storage account. No service call is made; this only builds a reference.
/// </summary>
/// <returns>A reference to the configured blob container.</returns>
private CloudBlobContainer GetCloudBlobContainer()
{
    // Fix: the local was misleadingly named "tableClient" although it is a blob client.
    CloudBlobClient blobClient = this.storageAccount.CreateCloudBlobClient();
    return blobClient.GetContainerReference(this.blobContainerName);
}
/// <summary>
/// Functional Cases : for New-AzureStorageContainer
/// 1. Create a list of new blob containers (Positive 2)
/// 2. Create a list of containers that some of them already exist (Negative 4)
///
/// Functional Cases : for Get-AzureStorageContainer
/// 3. Get a list of blob containers by using wildcards in the name (Positive 2)
///
/// Functional Cases : for Remove-AzureStorageContainer
/// 4. Remove a list of existing blob containers by using pipeline (Positive 6)
/// </summary>
internal void ContainerListOperations(Agent agent)
{
    // Unique prefix keeps this run's containers distinguishable from any others.
    string PREFIX = Utility.GenNameString("uniqueprefix-") + "-";
    string[] CONTAINER_NAMES = new string[] { Utility.GenNameString(PREFIX), Utility.GenNameString(PREFIX), Utility.GenNameString(PREFIX) };

    // PARTLY_EXISTING_NAMES differs from CONTAINER_NAMES only in the last element.
    string[] PARTLY_EXISTING_NAMES = new string[CONTAINER_NAMES.Length];
    Array.Copy(CONTAINER_NAMES, PARTLY_EXISTING_NAMES, CONTAINER_NAMES.Length - 1);
    PARTLY_EXISTING_NAMES[CONTAINER_NAMES.Length - 1] = Utility.GenNameString(PREFIX);

    // Sorted union of both name sets, used for cleanup and final removal.
    string[] MERGED_NAMES = CONTAINER_NAMES.Union(PARTLY_EXISTING_NAMES).ToArray();
    Array.Sort(MERGED_NAMES);

    // Generate the comparison data
    Collection<Dictionary<string, object>> comp = new Collection<Dictionary<string, object>>();
    foreach (string name in MERGED_NAMES)
    {
        comp.Add(Utility.GenComparisonData(StorageObjectType.Container, name));
    }

    CloudBlobClient blobClient = StorageAccount.CreateCloudBlobClient();

    // Ensure a clean slate: remove any leftover containers with these names.
    foreach (string name in MERGED_NAMES)
    {
        CloudBlobContainer container = blobClient.GetContainerReference(name);
        container.DeleteIfExists();
    }

    //--------------1. New operation--------------
    Test.Assert(agent.NewAzureStorageContainer(CONTAINER_NAMES), Utility.GenComparisonData("NewAzureStorageContainer", true));
    // Verification for returned values
    Test.Assert(agent.Output.Count == CONTAINER_NAMES.Count(), "3 row returned : {0}", agent.Output.Count);

    // Check if all the above containers have been created
    foreach (string name in CONTAINER_NAMES)
    {
        CloudBlobContainer container = blobClient.GetContainerReference(name);
        Test.Assert(container.Exists(), "container {0} should exist", name);
    }

    try
    {
        //--------------2. New operation--------------
        // Re-creating the same names must fail and report every conflict.
        Test.Assert(!agent.NewAzureStorageContainer(CONTAINER_NAMES), Utility.GenComparisonData("NewAzureStorageContainer", false));
        // Verification for returned values
        Test.Assert(agent.Output.Count == 0, "0 row returned : {0}", agent.Output.Count);
        int i = 0;
        foreach (string name in CONTAINER_NAMES)
        {
            Test.Assert(agent.ErrorMessages[i].Equals(String.Format("Container '{0}' already exists.", name)), agent.ErrorMessages[i]);
            ++i;
        }

        //--------------3. New operation--------------
        // Partly-existing set: only the one genuinely new container is created.
        Test.Assert(!agent.NewAzureStorageContainer(PARTLY_EXISTING_NAMES), Utility.GenComparisonData("NewAzureStorageContainer", false));
        // Verification for returned values
        Test.Assert(agent.Output.Count == 1, "1 row returned : {0}", agent.Output.Count);

        // Check if all the above containers have been created
        foreach (string name in CONTAINER_NAMES)
        {
            CloudBlobContainer container = blobClient.GetContainerReference(name);
            Test.Assert(container.Exists(), "container {0} should exist", name);
        }

        //--------------4. Get operation--------------
        // use wildcards
        Test.Assert(agent.GetAzureStorageContainer("*" + PREFIX + "*"), Utility.GenComparisonData("GetAzureStorageContainer", true));
        // Verification for returned values
        agent.OutputValidation(StorageAccount.CreateCloudBlobClient().ListContainers(PREFIX, ContainerListingDetails.All));

        // use Prefix parameter
        Test.Assert(agent.GetAzureStorageContainerByPrefix(PREFIX), Utility.GenComparisonData("GetAzureStorageContainerByPrefix", true));
        // Verification for returned values
        agent.OutputValidation(StorageAccount.CreateCloudBlobClient().ListContainers(PREFIX, ContainerListingDetails.All));
    }
    finally
    {
    }

    //--------------5. Remove operation--------------
    Test.Assert(agent.RemoveAzureStorageContainer(MERGED_NAMES), Utility.GenComparisonData("RemoveAzureStorageContainer", true));

    // Check if all the above containers have been removed
    foreach (string name in CONTAINER_NAMES)
    {
        CloudBlobContainer container = blobClient.GetContainerReference(name);
        Test.Assert(!container.Exists(), "container {0} should not exist", name);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureStorageTransfer"/> class.
/// </summary>
/// <param name="cloudBlobClient">Blob client the transfer operates through.</param>
/// <param name="logger">Logger for transfer diagnostics.</param>
public AzureStorageTransfer(CloudBlobClient cloudBlobClient, ILogger<AzureStorageTransfer> logger)
{
    _logger = logger;
    _cloudBlobClient = cloudBlobClient;
}
/// <summary>
/// Verifies that copying a snapshot over its base blob restores the snapshot's
/// content: uploads old text, snapshots, overwrites with new text, issues a raw
/// signed Copy-From request with the snapshot as source, then asserts the old
/// text is returned.
/// </summary>
/// <param name="context">Blob test context supplying account, key, and endpoint address.</param>
/// <param name="containerName">Existing container to operate in.</param>
/// <param name="blobName">Name of the block blob to create and restore.</param>
public void CopyFromToRestoreSnapshot(BlobContext context, string containerName, string blobName)
{
    string oldText = "Old stuff";
    string newText = "New stuff";
    StorageCredentials accountAndKey = new StorageCredentials(context.Account, context.Key);
    CloudStorageAccount account = new CloudStorageAccount(accountAndKey, false);
    CloudBlobClient blobClient = new CloudBlobClient(new Uri(context.Address), account.Credentials);
    CloudBlobContainer container = blobClient.GetContainerReference(containerName);
    CloudBlockBlob blob = container.GetBlockBlobReference(blobName);

    // Seed the blob, snapshot it, then overwrite the live blob.
    BlobTestBase.UploadText(blob, oldText, Encoding.UTF8);
    CloudBlockBlob snapshot = blob.CreateSnapshot();
    Assert.IsNotNull(snapshot.SnapshotTime);
    BlobTestBase.UploadText(blob, newText, Encoding.UTF8);

    // The copy source must carry the snapshot timestamp as a query parameter.
    Uri sourceUri = new Uri(snapshot.Uri.AbsoluteUri + "?snapshot=" + Request.ConvertDateTimeToSnapshotString(snapshot.SnapshotTime.Value));

    // Issue the copy through a hand-built, signed HTTP request (bypassing the client library).
    OperationContext opContext = new OperationContext();
    HttpWebRequest request = BlobHttpWebRequestFactory.CopyFrom(blob.Uri, 30, sourceUri, null, null, opContext);
    Assert.IsTrue(request != null, "Failed to create HttpWebRequest");
    BlobTests.SignRequest(request, context);
    HttpWebResponse response = (HttpWebResponse)request.GetResponse();
    Assert.AreEqual<HttpStatusCode>(response.StatusCode, HttpStatusCode.Accepted);

    // Copying the snapshot over the base blob restores the original content.
    string text = BlobTestBase.DownloadText(blob, Encoding.UTF8);
    Assert.AreEqual<string>(text, oldText);

    // Clean up the blob together with its snapshots.
    blob.Delete(DeleteSnapshotsOption.IncludeSnapshots, null, null);
}
/// <summary>
/// Initializes a new instance of the <see cref="CloudBlockBlob"/> class using a relative URI to the blob.
/// Delegates to the (blobUri, snapshotTime, client) constructor with a null snapshot time,
/// so the instance refers to the live blob rather than a snapshot.
/// </summary>
/// <param name="blobUri">The relative URI to the blob, beginning with the container name.</param>
/// <param name="client">A client object that specifies the endpoint for the Blob service.</param>
public CloudBlockBlob(string blobUri, CloudBlobClient client)
    : this(blobUri, null, client)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CloudBlockBlob"/> class using the specified blob Uri.
/// If snapshotTime is not null, the blob instance represents a snapshot.
/// Note that this is just a reference to a blob instance and no requests are issued to the service
/// yet to update the blob properties, attributes or metadata. FetchAttributes is the API that
/// issues such a request to the service.
/// </summary>
/// <param name="blobUri">Relative Uri to the blob.</param>
/// <param name="snapshotTime">Snapshot time in case the blob is a snapshot.</param>
/// <param name="client">Existing Blob service client which provides the base address.</param>
/// <param name="containerReference">The reference to the parent container.</param>
internal CloudBlockBlob(string blobUri, DateTime? snapshotTime, CloudBlobClient client, CloudBlobContainer containerReference)
    : base(blobUri, snapshotTime, client, containerReference)
{
    // Distinguish this reference as a block blob (as opposed to a page blob).
    this.Properties.BlobType = BlobType.BlockBlob;
}
/// <summary>
/// Initializes a new instance of the <see cref="CloudBlockBlob"/> class using the specified blob Uri.
/// Delegates to the (blobUri, snapshotTime, client, containerReference) constructor with a null
/// snapshot time, so the instance refers to the live blob rather than a snapshot.
/// Note that this is just a reference to a blob instance and no requests are issued to the service
/// yet to update the blob properties, attributes or metadata. FetchAttributes is the API that
/// issues such a request to the service.
/// </summary>
/// <param name="blobUri">Relative Uri to the blob.</param>
/// <param name="client">Existing Blob service client which provides the base address.</param>
/// <param name="containerReference">The reference to the parent container.</param>
internal CloudBlockBlob(string blobUri, CloudBlobClient client, CloudBlobContainer containerReference)
    : this(blobUri, null, client, containerReference)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="CloudBlockBlob"/> class from existing
/// blob attributes.
/// </summary>
/// <param name="attributes">The attributes (properties and metadata) to initialize from.</param>
/// <param name="serviceClient">The service client providing the base address.</param>
/// <param name="snapshotTime">The snapshot time, if the blob is a snapshot.</param>
internal CloudBlockBlob(BlobAttributes attributes, CloudBlobClient serviceClient, string snapshotTime)
    : base(attributes, serviceClient, snapshotTime)
{
    // Distinguish this reference as a block blob (as opposed to a page blob).
    this.Properties.BlobType = BlobType.BlockBlob;
}
/// <summary>
/// Demonstrates client-side blob encryption with Azure Key Vault integration:
/// uploads blobs encrypted under Key 1, rotates the active encryption key to
/// Key 2, re-encrypts the first blob by download/re-upload, and finally proves
/// the re-encrypted blob decrypts with a Key-2-only policy.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("Blob encryption with Key Vault integration demonstrating key rotation from Key 1 to Key 2");
    Console.WriteLine();

    // Create two secrets and obtain their IDs. This is normally a one-time setup step.
    // Although it is possible to use keys (rather than secrets) stored in Key Vault, this prevents caching.
    // Therefore it is recommended to use secrets along with a caching resolver (see below).
    string keyID1 = EncryptionShared.KeyVaultUtility.SetUpKeyVaultSecret("KeyRotationSampleSecret1");
    string keyID2 = EncryptionShared.KeyVaultUtility.SetUpKeyVaultSecret("KeyRotationSampleSecret2");

    // Retrieve storage account information from connection string
    // How to create a storage connection string - https://azure.microsoft.com/en-us/documentation/articles/storage-configure-connection-string/
    CloudStorageAccount storageAccount = EncryptionShared.Utility.CreateStorageAccountFromConnectionString();
    CloudBlobClient client = storageAccount.CreateCloudBlobClient();
    // GUID suffix keeps repeated demo runs from colliding on the container name.
    CloudBlobContainer container = client.GetContainerReference(DemoContainer + Guid.NewGuid().ToString("N"));

    // Construct a resolver capable of looking up keys and secrets stored in Key Vault.
    KeyVaultKeyResolver cloudResolver = new KeyVaultKeyResolver(EncryptionShared.KeyVaultUtility.GetAccessToken);

    // Set up a caching resolver so the secrets can be cached on the client. This is the recommended usage
    // pattern since the throttling targets for Storage and Key Vault services are orders of magnitude
    // different.
    CachingKeyResolver cachingResolver = new CachingKeyResolver(2, cloudResolver);

    // Create key instances corresponding to the key IDs. This will cache the secrets.
    IKey cloudKey1 = cachingResolver.ResolveKeyAsync(keyID1, CancellationToken.None).GetAwaiter().GetResult();
    IKey cloudKey2 = cachingResolver.ResolveKeyAsync(keyID2, CancellationToken.None).GetAwaiter().GetResult();

    // We begin with cloudKey1, and a resolver capable of resolving and caching Key Vault secrets.
    BlobEncryptionPolicy encryptionPolicy = new BlobEncryptionPolicy(cloudKey1, cachingResolver);
    client.DefaultRequestOptions.EncryptionPolicy = encryptionPolicy;
    client.DefaultRequestOptions.RequireEncryption = true;

    try
    {
        container.Create();
        int size = 5 * 1024 * 1024;
        byte[] buffer1 = new byte[size];
        byte[] buffer2 = new byte[size];
        Random rand = new Random();
        rand.NextBytes(buffer1);
        rand.NextBytes(buffer2);

        // Upload the first blob using the secret stored in Azure Key Vault.
        CloudBlockBlob blob = container.GetBlockBlobReference("blockblob1");
        Console.WriteLine("Uploading Blob 1 using Key 1.");

        // Upload the encrypted contents to the first blob.
        using (MemoryStream stream = new MemoryStream(buffer1))
        {
            blob.UploadFromStream(stream, size);
        }

        Console.WriteLine("Downloading and decrypting Blob 1.");

        // Download and decrypt the encrypted contents from the first blob.
        using (MemoryStream outputStream = new MemoryStream())
        {
            blob.DownloadToStream(outputStream);
        }

        // At this point we will rotate our keys so new encrypted content will use the
        // second key. Note that the same resolver is used, as this resolver is capable
        // of decrypting blobs encrypted using either key.
        Console.WriteLine("Rotating the active encryption key to Key 2.");
        client.DefaultRequestOptions.EncryptionPolicy = new BlobEncryptionPolicy(cloudKey2, cachingResolver);

        // Upload the second blob using the key stored in Azure Key Vault.
        CloudBlockBlob blob2 = container.GetBlockBlobReference("blockblob2");
        Console.WriteLine("Uploading Blob 2 using Key 2.");

        // Upload the encrypted contents to the second blob.
        using (MemoryStream stream = new MemoryStream(buffer2))
        {
            blob2.UploadFromStream(stream, size);
        }

        Console.WriteLine("Downloading and decrypting Blob 2.");

        // Download and decrypt the encrypted contents from the second blob.
        using (MemoryStream outputStream = new MemoryStream())
        {
            blob2.DownloadToStream(outputStream);
        }

        // Here we download and re-upload the first blob. This has the effect of updating
        // the blob to use the new key.
        using (MemoryStream memoryStream = new MemoryStream())
        {
            Console.WriteLine("Downloading and decrypting Blob 1.");
            blob.DownloadToStream(memoryStream);
            memoryStream.Seek(0, SeekOrigin.Begin);
            Console.WriteLine("Re-uploading Blob 1 using Key 2.");
            blob.UploadFromStream(memoryStream);
        }

        // For the purposes of demonstration, we now override the encryption policy to only recognize key 2.
        BlobEncryptionPolicy key2OnlyPolicy = new BlobEncryptionPolicy(cloudKey2, null);
        BlobRequestOptions key2OnlyOptions = new BlobRequestOptions() { EncryptionPolicy = key2OnlyPolicy };

        Console.WriteLine("Downloading and decrypting Blob 1.");

        // The first blob can still be decrypted because it is using the second key.
        using (MemoryStream outputStream = new MemoryStream())
        {
            blob.DownloadToStream(outputStream, options: key2OnlyOptions);
        }

        Console.WriteLine("Press enter key to exit");
        Console.ReadLine();
    }
    finally
    {
        // Always clean up the demo container, even on failure.
        container.DeleteIfExists();
    }
}
/// <summary>
/// Resolves a container reference on the given storage account.
/// No service call is made; this only builds the reference.
/// </summary>
/// <param name="storageAccount">Account whose blob service holds the container.</param>
/// <param name="containerName">Name of the container to reference.</param>
/// <returns>A reference to the named blob container.</returns>
private CloudBlobContainer GetDotnetBlobContainer(CloudStorageAccount storageAccount, string containerName)
{
    return storageAccount.CreateCloudBlobClient().GetContainerReference(containerName);
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobStorageRepository"/> class.
/// </summary>
/// <param name="configuration">Options carrying the connection string and SAS expiry.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="configuration"/> or its ConnectionString is null.
/// </exception>
public BlobStorageRepository(BlobStorageRepositoryOptions configuration)
{
    // Fix: a null configuration previously surfaced as a NullReferenceException,
    // and a missing connection string threw ArgumentException naming a
    // nonexistent "connectionString" parameter. ArgumentNullException derives
    // from ArgumentException, so existing catch blocks keep working.
    if (configuration == null)
    {
        throw new ArgumentNullException(nameof(configuration));
    }

    ConnectionString = configuration.ConnectionString
        ?? throw new ArgumentNullException(nameof(configuration), "ConnectionString must be provided.");
    cloudBlobClient = GetClient();
    minutesSasExpire = configuration.MinutesSasExpire;
}
/// <summary>
/// Exercises the raw lease protocol: acquires a 60-second lease via a hand-built
/// signed HTTP request, verifies the blob reports Locked, releases the lease with
/// the returned lease ID, verifies Unlocked, then deletes the blob.
/// </summary>
/// <param name="context">Blob test context supplying account, key, address, and timeout.</param>
/// <param name="containerName">Existing container to operate in.</param>
/// <param name="blobName">Name of the block blob to lease.</param>
public static void AcquireAndReleaseLeaseTest(BlobContext context, string containerName, string blobName)
{
    CloudStorageAccount account = new CloudStorageAccount(new StorageCredentials(context.Account, context.Key), false);
    CloudBlobClient client = new CloudBlobClient(new Uri(context.Address), account.Credentials);
    CloudBlobContainer container = client.GetContainerReference(containerName);
    CloudBlockBlob blob = container.GetBlockBlobReference(blobName);
    BlobTestBase.UploadText(blob, "Text sent to cloud", Encoding.UTF8);

    // acquire a lease on the blob and check LeaseStatus to be "locked"
    OperationContext opContext = new OperationContext();
    HttpWebRequest blobRequest = BlobHttpWebRequestFactory.Lease(
        blob.Uri,
        context.Timeout,
        LeaseAction.Acquire,
        null /* proposed lease ID */,
        60 /* lease duration */,
        null /* break period */,
        null /* access condition */,
        opContext);
    BlobTests.SignRequest(blobRequest, context);
    string leaseId = null;
    using (HttpWebResponse response = (HttpWebResponse)blobRequest.GetResponse())
    {
        // The service returns the lease ID in a response header; it is required for the release.
        leaseId = response.Headers["x-ms-lease-id"];
        Assert.AreEqual<HttpStatusCode>(response.StatusCode, HttpStatusCode.Created);
    }

    blob.FetchAttributes();
    Assert.AreEqual<LeaseStatus>(blob.Properties.LeaseStatus, LeaseStatus.Locked);

    // release the lease on the blob and check LeaseStatus to be "unlocked"
    opContext = new OperationContext();
    blobRequest = BlobHttpWebRequestFactory.Lease(
        blob.Uri,
        context.Timeout,
        LeaseAction.Release,
        null /* proposed lease ID */,
        null /* lease duration */,
        null /* break period */,
        AccessCondition.GenerateLeaseCondition(leaseId),
        opContext);
    BlobTests.SignRequest(blobRequest, context);
    using (HttpWebResponse response = (HttpWebResponse)blobRequest.GetResponse())
    {
        Assert.AreEqual<HttpStatusCode>(response.StatusCode, HttpStatusCode.OK);
    }

    blob.FetchAttributes();
    Assert.AreEqual<LeaseStatus>(blob.Properties.LeaseStatus, LeaseStatus.Unlocked);

    blob.Delete();
}
/// <summary>
/// Lazily creates and caches the blob client for the configured storage account.
/// </summary>
/// <returns>The cached <see cref="CloudBlobClient"/>, creating it on first use.</returns>
private CloudBlobClient GetCloudBlobClient()
{
    if (cloudBlobClient == null)
    {
        cloudBlobClient = CloudStorageAccount.CreateCloudBlobClient();
    }

    return cloudBlobClient;
}
/// <summary>
/// Initializes a new instance of the <see cref="CloudBlockBlob"/> class using a relative URI to the blob.
/// </summary>
/// <param name="blobUri">The relative URI to the blob, beginning with the container name.</param>
/// <param name="snapshotTime">The snapshot timestamp, if the blob is a snapshot.</param>
/// <param name="client">A client object that specifies the endpoint for the Blob service.</param>
public CloudBlockBlob(string blobUri, DateTime? snapshotTime, CloudBlobClient client)
    : base(blobUri, snapshotTime, client)
{
    // Distinguish this reference as a block blob (as opposed to a page blob).
    this.Properties.BlobType = BlobType.BlockBlob;
}
/// <summary>
/// Service Bus-triggered Azure Function that applies a PnP provisioning template
/// to a site: reads the job description from the message and its blob file, loads
/// the template from blob storage, applies it with up to 3 retries, updates the
/// job list item's status, and on success queues a "SetDefaultColumnValues"
/// follow-up message.
/// </summary>
public static void Run(
    [ServiceBusTrigger("site-updates-topic", "apply-template-subscription", AccessRights.Manage, Connection = "ManageTopicConnection")] BrokeredMessage updateMsg,
    [ServiceBus("new-sites-topic", Connection = "ManageTopicConnection")] ICollector<BrokeredMessage> newSitesTopic,
    ExecutionContext executionContext,
    TraceWriter log)
{
    log.Info($"C# Service Bus trigger function '{FunctionName}' processed message: {updateMsg.MessageId} (Label': {updateMsg.Label}')");

    /*
     * The following line should work, but doesn't, so small workaround here...
     */
    //var applyProvisioningTemplateJobAsJson = updateMsg.GetBody<ApplyProvisioningTemplateJob>();
    var stream = updateMsg.GetBody<Stream>();
    StreamReader streamReader = new StreamReader(stream);
    string applyProvisioningTemplateJobAsJson = streamReader.ReadToEnd();
    var applyProvisioningTemplateJob = JsonConvert.DeserializeObject<ApplyProvisioningTemplateJob>(applyProvisioningTemplateJobAsJson);

    // Fetch the job file from blob storage; it carries the template URL, the
    // target site's relative URL, and the template parameters.
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("AzureWebJobsStorage"));
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = blobClient.GetContainerReference(CloudConfigurationManager.GetSetting("JobFilesContainer"));
    var blob = container.GetBlobReference(applyProvisioningTemplateJob.FileNameWithExtension);
    var blobStream = new MemoryStream();
    blob.DownloadToStream(blobStream);
    streamReader = new StreamReader(blobStream);
    // Rewind the stream before reading the downloaded content.
    blobStream.Position = 0;
    string blobContent = streamReader.ReadToEnd();

    JObject provisioningJobFile = JObject.Parse(blobContent);
    var provisioningTemplateUrl = provisioningJobFile["ProvisioningTemplateUrl"].Value<string>();
    var relativeUrl = provisioningJobFile["RelativeUrl"].Value<string>();

    // get JSON result objects into a list
    IList<JToken> parameters = provisioningJobFile["TemplateParameters"].Children().ToList();

    // serialize JSON results into .NET objects
    IDictionary<string, string> templateParameters = new Dictionary<string, string>();
    foreach (JProperty parameter in parameters)
    {
        templateParameters.Add(parameter.Name, parameter.Value.ToObject<string>());
    }

    var clientContextManager = new ClientContextManager(new BaseConfiguration(), new CertificateManager());
    var provisioningSiteUrl = CloudConfigurationManager.GetSetting("ProvisioningSite");
    using (var ctx = clientContextManager.GetAzureADAppOnlyAuthenticatedContext(provisioningSiteUrl))
    {
        // Todo: get list title from configuration.
        // Assume that the web has a list named "PnPProvisioningJobs".
        List provisioningJobsList = ctx.Web.Lists.GetByTitle("PnPProvisioningJobs");
        ListItem listItem = provisioningJobsList.GetItemById(applyProvisioningTemplateJob.ListItemID);

        // Write a new value to the PnPProvisioningJobStatus field of
        // the PnPProvisioningJobs item.
        listItem["PnPProvisioningJobStatus"] = "Running (applying template)";
        listItem.Update();
        ctx.ExecuteQuery();

        // Load the provisioning template file from the template container.
        var templateContainer = blobClient.GetContainerReference(CloudConfigurationManager.GetSetting("TemplateFilesContainer"));
        var templateFileName = Path.GetFileName(provisioningTemplateUrl);
        var templateBlob = templateContainer.GetBlobReference(templateFileName);
        var templateBlobStream = new MemoryStream();
        templateBlob.DownloadToStream(templateBlobStream);
        var provisioningTemplate = new SiteTemplate(templateBlobStream).ProvisioningTemplate;
        log.Info($"(id {executionContext.InvocationId}) Retrieved template {templateFileName} from blob storage.");

        // Inject the job-specific parameters into the template.
        foreach (var parameter in templateParameters)
        {
            provisioningTemplate.Parameters[parameter.Key] = parameter.Value;
        }

        var ptai = new ProvisioningTemplateApplyingInformation
        {
            ProgressDelegate = (string message, int progress, int total) =>
            {
                log.Info($"(id {executionContext.InvocationId})[Progress]: {progress:00}/{total:00} - {message}");
            },
            MessagesDelegate = (string message, ProvisioningMessageType messageType) =>
            {
                log.Info($"(id {executionContext.InvocationId})[{messageType.ToString()}]: {message}");
            },
        };

        var tenantUrl = new Uri(CloudConfigurationManager.GetSetting("TenantUrl"));
        Uri.TryCreate(tenantUrl, relativeUrl, out Uri fullSiteUrl);
        var templateAppliedWithOutAnyErrors = false;
        log.Info($"Opening ctx to {fullSiteUrl.AbsoluteUri}");
        using (var newSiteContext = clientContextManager.GetAzureADAppOnlyAuthenticatedContext(fullSiteUrl.AbsoluteUri))
        {
            // Apply the template, retrying up to maxTries times on any exception.
            int tryCount = 0;
            const int maxTries = 3;
            do
            {
                tryCount++;
                try
                {
                    log.Info($"Applying the provisioning template {provisioningTemplateUrl} to {fullSiteUrl.AbsoluteUri}.");
                    newSiteContext.Web.ApplyProvisioningTemplate(provisioningTemplate, ptai);
                    log.Info($"Provisioning template has been applied to {fullSiteUrl.AbsoluteUri}.");
                    templateAppliedWithOutAnyErrors = true;
                }
                catch (Exception ex)
                {
                    log.Error($"Error occured while applying the provisioning template to {fullSiteUrl.AbsoluteUri}.", ex);
                    templateAppliedWithOutAnyErrors = false;
                    if (tryCount <= maxTries)
                    {
                        log.Warning($"An error occured while applying the provisioning template, but will try to apply the provisioning template to {fullSiteUrl.AbsoluteUri} once more. (max {maxTries} times, this was attempt number {tryCount}.)");
                    }
                    else
                    {
                        log.Warning($"Tried {maxTries} times to apply the provisioning template without succes.");
                    }
                }
            }
            while (templateAppliedWithOutAnyErrors == false && tryCount <= maxTries);
        }

        if (templateAppliedWithOutAnyErrors == true)
        {
            // Success: queue the follow-up step and mark the job provisioned.
            var setDefaultColumnValuesMsg = new BrokeredMessage(applyProvisioningTemplateJob, new DataContractJsonSerializer(typeof(ApplyProvisioningTemplateJob)))
            {
                ContentType = "application/json",
                Label = "SetDefaultColumnValues"
            };
            newSitesTopic.Add(setDefaultColumnValuesMsg);
            listItem["PnPProvisioningJobStatus"] = "Provisioned";
        }
        else
        {
            listItem["PnPProvisioningJobStatus"] = "Failed (error while applying template)";
        }

        listItem.Update();
        ctx.ExecuteQuery();
    }
}
/// <summary>
/// Walk-through of common Azure Blob storage operations (upload, download,
/// listing, container metadata, permissions, copy, rename, append blobs and
/// async upload). Writes progress to the console.
/// </summary>
static void AzureStorageBlobExamples()
{
    // Getting configuration from the App.config file and get access to the CloudStorageAccount.
    // Azure access Key should is stored in the App.config
    String storageKeyValue = CloudConfigurationManager.GetSetting("AzureStorageAccount");

    // NOTE(review): echoing the connection string/key to the console leaks a secret;
    // acceptable only in this learning sample, never in production.
    Console.WriteLine("Storage Account Key Used" + storageKeyValue);

    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(storageKeyValue);

    // Getting the Azure Client
    CloudBlobClient cloudBlobClient = storageAccount.CreateCloudBlobClient();

    // Getting reference for the container
    CloudBlobContainer cloudContainer = cloudBlobClient.GetContainerReference("images");

    // Create the container (folder) if does not exists
    cloudContainer.CreateIfNotExists();

    // Get the reference for the Blob in Azure
    CloudBlockBlob blobCloud = cloudContainer.GetBlockBlobReference("photo.png");

    // Learning 1: Copy a file from local path in to Azure Blob storage
    // Open a local file and upload to the Azure
    using (var fileStream = System.IO.File.OpenRead(@"C:\Users\Administrator\Pictures\photo.jpg"))
    {
        blobCloud.UploadFromStream(fileStream);
        Console.WriteLine("File is uploaded to the cloud");
    }

    // Learning 2: Download a blob from Azure to the local host.
    // Assume the same file which has been uploaded in the previous step.
    // NOTE(review): FileMode.CreateNew throws if the local copy already exists (re-runs).
    blobCloud.DownloadToFile(@"C:\Users\Administrator\Pictures\photocopy.jpg", FileMode.CreateNew);

    // Learning 3: List all blob objects URI from a container
    var blobs = cloudContainer.ListBlobs();
    foreach (var blob in blobs)
    {
        Console.Write(blob.Uri);
    }

    // Learning 4: List properties of a container and its metadata
    cloudContainer.FetchAttributes();
    Console.WriteLine(cloudContainer.Properties.LastModified);
    Console.WriteLine(cloudContainer.Properties.ETag);
    var metaData = cloudContainer.Metadata;
    foreach (var metaDataItem in metaData)
    {
        Console.Write("Key " + metaDataItem.Key + " & ");
        Console.WriteLine("Value" + metaDataItem.Value);
    }

    // Learning 5: Setting metadata for a container (two equivalent ways)
    cloudContainer.Metadata.Add("SampleKey", "SampleValue");
    cloudContainer.Metadata["SecondSample"] = "Second Value";
    cloudContainer.SetMetadata();

    // Learning 6: Setting permission for a container
    BlobContainerPermissions permissions = cloudContainer.GetPermissions();
    Console.WriteLine("Container permission " + permissions.PublicAccess.ToString());
    foreach (var sharedAccessPolicy in permissions.SharedAccessPolicies)
    {
        Console.WriteLine(sharedAccessPolicy.Key.ToString() + " = " + sharedAccessPolicy.Value.ToString());
    }
    // Container-level public access...
    permissions.PublicAccess = BlobContainerPublicAccessType.Container;
    // ...then switched off again; only the last assignment takes effect.
    permissions.PublicAccess = BlobContainerPublicAccessType.Off;
    cloudContainer.SetPermissions(permissions);

    // Learning 7: Azure copy from one blob to another
    CloudBlockBlob copyBlob = cloudContainer.GetBlockBlobReference("photo-copy.jpg");
    copyBlob.StartCopy(new Uri(blobCloud.Uri.AbsoluteUri));

    // Learning 8: Copy all blobs from one container to another
    CloudBlobContainer sourceContainer = cloudContainer;
    CloudBlobContainer targetContainer = cloudBlobClient.GetContainerReference("newimages");
    targetContainer.CreateIfNotExists();
    foreach (var blob in sourceContainer.ListBlobs())
    {
        var sourceBlob = blob as CloudBlob;
        Console.WriteLine("Source Blob " + sourceBlob.Name);
        CloudBlockBlob newBlob = targetContainer.GetBlockBlobReference(sourceBlob.Name);
        newBlob.StartCopy(new Uri(blob.Uri.AbsoluteUri));
    }

    // Learning 9: Rename the blob (copy under the new name, then delete the source)
    CloudBlockBlob sourceBlockBlob = cloudContainer.GetBlockBlobReference("photo-copy.jpg");
    CloudBlockBlob targetBlockBlob = cloudContainer.GetBlockBlobReference("copy-photo.jpg");
    targetBlockBlob.StartCopy(new Uri(sourceBlockBlob.Uri.AbsoluteUri));
    while (targetBlockBlob.CopyState.Status == CopyStatus.Pending)
    {
        // Sleep for 2 seconds between polls.
        System.Threading.Thread.Sleep(2000);
        // BUGFIX: CopyState is a cached snapshot that is only refreshed by
        // FetchAttributes(); without this call the loop spins forever on a
        // stale Pending status.
        targetBlockBlob.FetchAttributes();
    }
    sourceBlockBlob.Delete();

    // Learning 10: Appending to a blob
    DateTime date = DateTime.Today;
    CloudBlobContainer logContainer = cloudBlobClient.GetContainerReference("logs");
    CloudAppendBlob logBlog = logContainer.GetAppendBlobReference(string.Format("{0}{1}", date.ToString("yyyyMMdd"), ".log"));
    logContainer.CreateIfNotExists();
    // If the append blob does not exist, create one
    if (!logBlog.Exists())
    {
        logBlog.CreateOrReplace();
    }
    // BUGFIX: "HH:MM:ss" rendered the month where minutes were intended;
    // "HH:mm:ss" is hours:minutes:seconds.
    logBlog.AppendText(string.Format("{0} : Azure is rocking in the cloud space at ", date.ToString("HH:mm:ss")));
    // Similar to AppendText there are AppendBlock, AppendFromByteArray,
    // AppendFromFile and AppendFromStream.
    // Finally display the content of the log file.
    Console.WriteLine(logBlog.DownloadText());

    // Learning 11: Multiple chunk of file upload to Azure
    AzureStorageReference.Program.uploadLargeFiles(cloudBlobClient, @"C:\Users\Administrator\Pictures");

    // Learning 12: Upload using Async (APM pattern)
    AsyncCallback callBack = new AsyncCallback(x => Console.WriteLine("Copy Async Completed"));
    CloudBlockBlob copyAsync = cloudContainer.GetBlockBlobReference("newphoto.png");
    copyAsync.BeginStartCopy(blobCloud, callBack, null);

    Console.WriteLine("Press any key to continue");
    Console.ReadKey();
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobProvider"/> class
/// from the configured <see cref="AzureBlobOptions"/>.
/// </summary>
/// <param name="options">Options carrying the storage connection string and CDN URL.</param>
public AzureBlobProvider(IOptions<AzureBlobOptions> options)
{
    var blobOptions = options.Value;
    _cdnUrl = blobOptions.CdnUrl;
    _cloudStorageAccount = ParseConnectionString(blobOptions.ConnectionString);
    _cloudBlobClient = _cloudStorageAccount.CreateCloudBlobClient();
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobService"/> class.
/// </summary>
/// <param name="settings"><see cref="AppSettings"/> instance.</param>
/// <param name="client"><see cref="CloudBlobClient"/> instance.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public BlobService(AppSettings settings, CloudBlobClient client)
{
    if (settings == null)
    {
        throw new ArgumentNullException(nameof(settings));
    }
    if (client == null)
    {
        throw new ArgumentNullException(nameof(client));
    }
    this._settings = settings;
    this._client = client;
}
/// <summary>
/// Creates the blob client for the storage account described by the connection string.
/// </summary>
/// <param name="connectionString">Azure storage connection string.</param>
public void Connect(string connectionString)
{
    var storageAccount = CloudStorageAccount.Parse(connectionString);
    _client = storageAccount.CreateCloudBlobClient();
}
// Reused across invocations: creating a new HttpClient per request exhausts sockets.
private static readonly HttpClient predictionClient = new HttpClient();

/// <summary>
/// HTTP-triggered function: scores an uploaded image blob against a Custom Vision
/// prediction endpoint and either records its metadata as JSON (issue detected)
/// or deletes the blob from the upload container.
/// </summary>
/// <param name="req">Request carrying the blob name (query string or JSON body).</param>
/// <param name="log">Function logger.</param>
/// <returns>200 OK with the image id, or "-1" when the image is not an issue.</returns>
public static async Task<IActionResult> Run([HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req, ILogger log)
{
    log.LogInformation("C# HTTP trigger function processed a request.");
    string responseMessage;
    string blobName = req.Query["blobName"];
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    blobName = blobName ?? data?.blobName;

    // Robustness: without a blob name every storage call below would throw.
    if (string.IsNullOrEmpty(blobName))
    {
        return new BadRequestObjectResult("Please pass a blobName on the query string or in the request body.");
    }

    string sourceStorage = Environment.GetEnvironmentVariable("NewImageSourceStorage");
    CloudStorageAccount sourceStorageAccount = CloudStorageAccount.Parse(sourceStorage);
    string imageContainerName = Environment.GetEnvironmentVariable("ImageContainerName");
    CloudBlobClient imageBlobClient = sourceStorageAccount.CreateCloudBlobClient();
    CloudBlobContainer imageContainer = imageBlobClient.GetContainerReference(imageContainerName);
    CloudBlockBlob imageBlob = imageContainer.GetBlockBlobReference(blobName);

    ImageMetadata imageData = new ImageMetadata()
    {
        // UtcNow: server-local time is ambiguous for stored metadata.
        timestamp = DateTime.UtcNow,
        uploadedFileName = blobName,
        id = Path.GetFileNameWithoutExtension(blobName)
    };

    using (MemoryStream blobMemStream = new MemoryStream())
    {
        await imageBlob.DownloadToStreamAsync(blobMemStream);
        byte[] byteData = blobMemStream.ToArray();
        log.LogInformation("Image Byte Array:" + byteData);

        // Prediction URL - replace this example URL with your valid Prediction URL.
        string rootUrl = Environment.GetEnvironmentVariable("CustomVisionRootUrl");
        string iteration = Environment.GetEnvironmentVariable("CustomVisionIteration");
        string url = rootUrl + iteration + "/image";
        HttpResponseMessage response;

        // Request body. Try this sample with a locally stored image.
        using (var content = new ByteArrayContent(byteData))
        using (var predictionRequest = new HttpRequestMessage(HttpMethod.Post, url))
        {
            content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
            // Per-request header keeps the shared client free of duplicated headers;
            // replace this example key with your valid Prediction-Key.
            predictionRequest.Headers.Add("Prediction-Key", Environment.GetEnvironmentVariable("CustomVisionPredictionKey"));
            predictionRequest.Content = content;
            response = await predictionClient.SendAsync(predictionRequest);
            string responseBody = await response.Content.ReadAsStringAsync();
            imageData = ProcessCustomVisionResults(responseBody, imageData);
            Console.WriteLine(responseBody);
        }

        if (imageData.isValidatedIssue)
        {
            log.LogInformation("Uploaded Image has been identified as an issue");
            string metaContainerName = Environment.GetEnvironmentVariable("ImageMetadataContainer");
            // Reuse the existing client rather than building a second one for the same account.
            CloudBlobContainer metaContainer = imageBlobClient.GetContainerReference(metaContainerName);
            await metaContainer.CreateIfNotExistsAsync();
            string newMetaJson = System.Text.Json.JsonSerializer.Serialize<ImageMetadata>(imageData);
            CloudBlockBlob newMetaBlob = metaContainer.GetBlockBlobReference(imageData.id + ".json");
            await newMetaBlob.UploadTextAsync(newMetaJson);
            responseMessage = imageData.id;
        }
        else
        {
            log.LogInformation("Uploaded Image was not identified as an Issue. Removing image from upload container...");
            await imageBlob.DeleteIfExistsAsync();
            responseMessage = "-1";
        }
    }

    return new OkObjectResult(responseMessage);
}
/// <summary>
/// Receives the first file of the multipart form request and uploads it to the
/// "imagenaparatos" blob container under a timestamped name.
/// </summary>
/// <returns>The generated blob name, or "Sin Imagen" when there is no file or an error occurs.</returns>
public async Task<String> IngresarImagen()
{
    try
    {
        IFormFile file = Request.Form.Files[0];
        var path = "";
        string folderName = "Upload";
        string webRootPath = _hostingEnvironment.ContentRootPath;
        string newPath = Path.Combine(webRootPath, folderName);
        if (file.Length > 0)
        {
            // SECURITY(review): the storage account key is hard-coded in source;
            // move it to configuration/Key Vault and rotate the exposed key.
            CloudStorageAccount storageAccount = CloudStorageAccount.Parse("DefaultEndpointsProtocol=https;AccountName=datossensores;AccountKey=x7W/PW86xtAuEJv+aOwyIYiG1dHC0+8EX3GP2ktC39B94ppau5PodjNvf+3NXsoG5/On6kJIQ9T2EtM5+HtAXA==");

            // Create the blob client.
            CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();

            // Retrieve a reference to a container.
            CloudBlobContainer container = blobClient.GetContainerReference("imagenaparatos");

            // Create the container if it doesn't already exist.
            await container.CreateIfNotExistsAsync();

            string fileName = ContentDispositionHeaderValue.Parse(file.ContentDisposition).FileName.Trim('"');
            string fullPath = Path.Combine(newPath, fileName);

            // Blob name: form field name (dots stripped) + "(d-M-yyyy-ticks)" + original
            // extension. Capture the clock once so all components agree.
            var now = DateTime.Now;
            path = file.Name.Replace(".", "") + "(" + now.Day + "-" + now.Month + "-" + now.Year + "-" + now.TimeOfDay.Ticks + ")" + Path.GetExtension(fullPath);

            CloudBlockBlob blockBlob = container.GetBlockBlobReference(path);
            using (Stream fileStream = file.OpenReadStream())
            {
                await blockBlob.UploadFromStreamAsync(fileStream);
            }

            return path;
        }
        else
        {
            return "Sin Imagen";
        }
    }
    catch (System.Exception)
    {
        // Best-effort contract: callers only distinguish success (blob name) from failure.
        // NOTE(review): consider logging the exception instead of discarding it.
        return "Sin Imagen";
    }
}
/// <summary>
/// Scans every streaming locator in the account and, for assets that have an .ism
/// server manifest but no .ismc client manifest, generates and uploads the missing
/// .ismc (and links it from the .ism), prompting the operator per asset.
/// </summary>
/// <param name="config">Media Services / storage configuration.</param>
private static void RunApplication(ConfigWrapper config)
{
    client = CreateMediaServicesClient(config);

    // Set the polling interval for long running operations to 2 seconds.
    // The default value is 30 seconds for the .NET client SDK
    client.LongRunningOperationRetryTimeout = 2;

    // Connect to Storage.
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(config.StorageConnectionString);
    CloudBlobClient cloudBlobClient = storageAccount.CreateCloudBlobClient();

    StartEndpointIfNotRunning(config);

    // Get a list of all of the locators and enumerate through them a page at a time.
    IPage<StreamingLocator> firstPage = client.StreamingLocators.List(config.ResourceGroup, config.AccountName);
    IPage<StreamingLocator> currentPage = firstPage;

    // BUGFIX: 'always' was declared inside the paging loop, so answering (a)lways
    // only lasted until the end of the current page. Hoisted so it persists.
    bool always = false;

    do
    {
        foreach (StreamingLocator locator in currentPage)
        {
            // Get the asset associated with the locator.
            Asset asset = client.Assets.Get(config.ResourceGroup, config.AccountName, locator.AssetName);

            // Get the Storage container associated with the asset.
            CloudBlobContainer storageContainer = cloudBlobClient.GetContainerReference(asset.Container);

            // Get a manifest file list from the Storage container.
            List<string> fileList = GetFilesListFromStorage(storageContainer);
            string ismcFileName = fileList.Where(a => a.ToLower().Contains(".ismc")).FirstOrDefault();
            string ismManifestFileName = fileList.Where(a => a.ToLower().EndsWith(".ism")).FirstOrDefault();

            // If there is no .ism then there's no reason to continue. If there's no .ismc we need to add it.
            if (ismManifestFileName != null && ismcFileName == null)
            {
                Console.WriteLine("Asset {0} does not have an ISMC file.", asset.Name);
                if (!always)
                {
                    Console.WriteLine("Add the ISMC? (y)es, (n)o, (a)lways, (q)uit");
                    ConsoleKeyInfo response = Console.ReadKey();
                    string responseChar = response.Key.ToString();
                    if (responseChar.Equals("N"))
                    {
                        continue;
                    }
                    if (responseChar.Equals("A"))
                    {
                        always = true;
                    }
                    else if (!(responseChar.Equals("Y")))
                    {
                        break; // At this point anything other than a 'yes' should quit the loop/application.
                    }
                }

                // Smooth Streaming URL for the locator; empty when none could be resolved.
                string streamingUrl = GetStreamingUrlsAndDrmType(client, config.ResourceGroup, config.AccountName, locator.Name);
                if (streamingUrl.Length == 0)
                {
                    // error state, skip this asset. We shouldn't ever be here.
                    continue;
                }

                string ismcContentXml = SendManifestRequest(new Uri(streamingUrl));
                if (ismcContentXml.Length == 0)
                {
                    //error state, skip this asset
                    continue;
                }

                if (ismcContentXml.IndexOf("<Protection>") > 0)
                {
                    Console.WriteLine("Content is encrypted. Removing the protection header from the client manifest.");
                    //remove DRM from the ISCM manifest
                    ismcContentXml = Xml.RemoveXmlNode(ismcContentXml);
                }

                // BUGFIX: use the last '.' so manifest names containing dots
                // (e.g. "show.episode1.ism") keep their full base name.
                string newIsmcFileName = ismManifestFileName.Substring(0, ismManifestFileName.LastIndexOf(".")) + ".ismc";
                WriteStringToBlob(ismcContentXml, newIsmcFileName, storageContainer);

                // Download the ISM so that we can modify it to include the ISMC file link,
                // then upload the modified manifest back over the original.
                string ismXmlContent = GetFileXmlFromStorage(storageContainer, ismManifestFileName);
                ismXmlContent = Xml.AddIsmcToIsm(ismXmlContent, newIsmcFileName);
                WriteStringToBlob(ismXmlContent, ismManifestFileName, storageContainer);
            }
        }

        // Continue on to the next page of locators.
        try
        {
            currentPage = client.StreamingLocators.ListNext(currentPage.NextPageLink);
        }
        catch (Exception)
        {
            // ListNext throws once the final page has been consumed.
            // BUGFIX: without this break the stale page would be reprocessed forever,
            // because currentPage (and its NextPageLink) never changes after a failure.
            break;
        }
    } while (currentPage.NextPageLink != null);
}
/// <summary>
/// Removes the uploaded-file container from the storage account.
/// NOTE(review): despite the name, this deletes the whole <c>ContainerName</c>
/// container (and everything in it), not a single file.
/// </summary>
/// <param name="storageName">Store which has file to remove</param>
/// <param name="storageKey">Store access key</param>
private static void RemoveFile(string storageName, string storageKey)
{
    string endpoint = General.BlobEndpointUri(storageName);
    var accountCredentials = new StorageCredentialsAccountAndKey(storageName, storageKey);
    var blobClient = new CloudBlobClient(endpoint, accountCredentials);
    CloudBlobContainer container = blobClient.GetContainerReference(ContainerName);
    if (!Exists(container))
    {
        return;
    }
    container.Delete();
}
/// <summary>
/// Initializes a new instance of the <see cref="GetImages"/> class,
/// obtaining its blob client from the supplied helper.
/// </summary>
/// <param name="blobUtils">Helper that builds the configured blob client.</param>
public GetImages(BlobUtils blobUtils) => _blobClient = blobUtils.ConfigureClient();
/// <summary>
/// Fetches the blob service properties and dumps the CORS rules, metrics,
/// logging and versioning settings to the console.
/// </summary>
/// <param name="blobClient">Client whose service properties are inspected.</param>
/// <returns>The retrieved <see cref="ServiceProperties"/>; may be null.</returns>
private ServiceProperties CurrentProperties(CloudBlobClient blobClient)
{
    var serviceProperties = blobClient.GetServiceProperties();
    if (serviceProperties == null)
    {
        return serviceProperties;
    }

    var cors = serviceProperties.Cors;
    if (cors != null)
    {
        Console.WriteLine("Cors.CorsRules.Count : " + cors.CorsRules.Count);
        for (int ruleIndex = 0; ruleIndex < cors.CorsRules.Count; ruleIndex++)
        {
            var corsRule = cors.CorsRules[ruleIndex];
            Console.WriteLine("corsRule[index] : " + ruleIndex);
            foreach (var allowedHeader in corsRule.AllowedHeaders)
            {
                Console.WriteLine("corsRule.AllowedHeaders : " + allowedHeader);
            }
            Console.WriteLine("corsRule.AllowedMethods : " + corsRule.AllowedMethods);
            foreach (var allowedOrigin in corsRule.AllowedOrigins)
            {
                Console.WriteLine("corsRule.AllowedOrigins : " + allowedOrigin);
            }
            foreach (var exposedHeader in corsRule.ExposedHeaders)
            {
                Console.WriteLine("corsRule.ExposedHeaders : " + exposedHeader);
            }
            Console.WriteLine("corsRule.MaxAgeInSeconds : " + corsRule.MaxAgeInSeconds);
        }
    }

    Console.WriteLine("DefaultServiceVersion : " + serviceProperties.DefaultServiceVersion);
    Console.WriteLine("HourMetrics.MetricsLevel : " + serviceProperties.HourMetrics.MetricsLevel);
    Console.WriteLine("HourMetrics.RetentionDays : " + serviceProperties.HourMetrics.RetentionDays);
    Console.WriteLine("HourMetrics.Version : " + serviceProperties.HourMetrics.Version);
    Console.WriteLine("Logging.LoggingOperations : " + serviceProperties.Logging.LoggingOperations);
    Console.WriteLine("Logging.RetentionDays : " + serviceProperties.Logging.RetentionDays);
    Console.WriteLine("Logging.Version : " + serviceProperties.Logging.Version);
    Console.WriteLine("MinuteMetrics.MetricsLevel : " + serviceProperties.MinuteMetrics.MetricsLevel);
    Console.WriteLine("MinuteMetrics.RetentionDays : " + serviceProperties.MinuteMetrics.RetentionDays);
    Console.WriteLine("MinuteMetrics.Version : " + serviceProperties.MinuteMetrics.Version);

    return serviceProperties;
}
/// <summary>
/// Generates a Pascal-VOC style XML annotation file for every sky image selected
/// for training; uploads the XML to the "found" blob container (cloud mode) or
/// saves it under wwwroot/images (local mode).
/// </summary>
/// <returns>204 No Content when done.</returns>
public async Task<ActionResult> GenerateXML()
{
    // BUGFIX: must be initialized — it is only assigned when !local, and the
    // compiler's definite-assignment analysis rejects the later use otherwise.
    CloudBlobContainer cloudBlobContainer = null;
    if (!local)
    {
        string storageConnection = _configuration.GetSection("myStorage").Value;
        CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse(storageConnection);
        //create a block blob
        CloudBlobClient cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
        cloudBlobContainer = cloudBlobClient.GetContainerReference("found");
    }

    var allMeteors = _context.SkyImages.Where(y => y.selectedForTraining == true).Include(x => x.detectedObjects)
        .ThenInclude(y => y.bbox);
    foreach (SkyImage skyImage in allMeteors)
    {
        //generate xml annotation file
        XElement xmlTree = new XElement("annotation");
        XElement aFilename = new XElement("filename");
        aFilename.Value = skyImage.filename;
        xmlTree.Add(aFilename);

        XElement aSize = new XElement("size");
        aSize.Add(new XElement("width", skyImage.width));
        aSize.Add(new XElement("height", skyImage.height));
        aSize.Add(new XElement("depth", "1"));
        xmlTree.Add(aSize);

        foreach (SkyObjectDetection sod in skyImage.detectedObjects)
        {
            XElement anObject = new XElement("object");
            anObject.Add(new XElement("score", sod.score));
            anObject.Add(new XElement("name", sod.skyObjectClass));
            XElement bndBox = new XElement("bndbox");
            bndBox.Add(new XElement("xmin", sod.bbox.xmin));
            bndBox.Add(new XElement("ymin", sod.bbox.ymin));
            bndBox.Add(new XElement("xmax", sod.bbox.xmax));
            bndBox.Add(new XElement("ymax", sod.bbox.ymax));
            anObject.Add(bndBox);
            xmlTree.Add(anObject);
        }

        // Both elements are inserted directly after <filename>, so the second
        // insert lands before the first: filename, dateTaken, camera.
        xmlTree.FirstNode.AddAfterSelf(new XElement("camera", skyImage.camera));
        xmlTree.FirstNode.AddAfterSelf(new XElement("dateTaken", skyImage.date));

        if (!local)
        {
            CloudBlockBlob cloudBlockBlob = cloudBlobContainer.GetBlockBlobReference(skyImage.filename.Replace(".jpg", ".xml"));
            cloudBlockBlob.Properties.ContentType = "text/xml";
            //updload xml
            await cloudBlockBlob.UploadTextAsync(xmlTree.ToString());
        }
        else
        {
            xmlTree.Save("\\home\\site\\wwwroot\\images\\" + skyImage.filename.Replace(".jpg", ".xml"));
        }
    }
    return NoContent();
}
/// <summary>
/// Initializes a new instance of the <see cref="BlobStorageManager"/> class.
/// </summary>
/// <param name="cloudBlobClient">Client used for all blob operations.</param>
/// <exception cref="ArgumentNullException"><paramref name="cloudBlobClient"/> is null.</exception>
public BlobStorageManager(CloudBlobClient cloudBlobClient)
{
    // Fail fast on null, consistent with the other service constructors in this codebase.
    _cloudBlobClient = cloudBlobClient ?? throw new ArgumentNullException(nameof(cloudBlobClient));
}
// Uploads D:\cfx.zip to the "adokontajnerneki" container: files below the block
// size go up in a single call, larger files are split into 256 KB blocks that are
// PUT one at a time and then committed as a block list.
static void Main(string[] args)
{
    CloudBlobClient myBlobClient = storageAccount.CreateCloudBlobClient();
    // Blobs above this threshold are not uploaded in a single request.
    myBlobClient.SingleBlobUploadThresholdInBytes = 1024 * 1024;
    CloudBlobContainer container = myBlobClient.GetContainerReference("adokontajnerneki");
    //container.CreateIfNotExists();
    CloudBlockBlob myBlob = container.GetBlockBlobReference("cfx.zip");
    var blockSize = 256 * 1024;
    myBlob.StreamWriteSizeInBytes = blockSize;
    var fileName = @"D:\cfx.zip";
    long bytesToUpload = (new FileInfo(fileName)).Length;
    long fileSize = bytesToUpload;
    if (bytesToUpload < blockSize)
    {
        // Small file: one-shot upload via the async API.
        CancellationToken ca = new CancellationToken();
        var ado = myBlob.UploadFromFileAsync(fileName, FileMode.Open, ca);
        Console.WriteLine(ado.Status); //Does Not Help Much
        ado.ContinueWith(t =>
        {
            Console.WriteLine("Status = " + t.Status);
            Console.WriteLine("It is over"); //this is working OK
        });
    }
    else
    {
        // Large file: manual block upload. Each iteration reads one chunk from
        // disk, PUTs it as a block, and waits on a ManualResetEvent until the
        // continuation has run — so the loop is effectively synchronous.
        List<string> blockIds = new List<string>();
        int index = 1;
        long startPosition = 0;
        long bytesUploaded = 0;
        do
        {
            var bytesToRead = Math.Min(blockSize, bytesToUpload);
            var blobContents = new byte[bytesToRead];
            // NOTE(review): the file is reopened for every block, and fs.Read's
            // return value (bytes actually read) is ignored — assumes a full read.
            using (FileStream fs = new FileStream(fileName, FileMode.Open))
            {
                fs.Position = startPosition;
                fs.Read(blobContents, 0, (int)bytesToRead);
            }
            ManualResetEvent mre = new ManualResetEvent(false);
            // "d6" zero-pads the index so all block ids have equal length.
            var blockId = Convert.ToBase64String(Encoding.UTF8.GetBytes(index.ToString("d6")));
            Console.WriteLine("Now uploading block # " + index.ToString("d6"));
            blockIds.Add(blockId);
            var ado = myBlob.PutBlockAsync(blockId, new MemoryStream(blobContents), null);
            ado.ContinueWith(t =>
            {
                // Progress bookkeeping happens in the continuation; mre.Set()
                // releases the loop to start the next block.
                bytesUploaded += bytesToRead;
                bytesToUpload -= bytesToRead;
                startPosition += bytesToRead;
                index++;
                double percentComplete = (double)bytesUploaded / (double)fileSize;
                Console.WriteLine("Percent complete = " + percentComplete.ToString("P"));
                mre.Set();
            });
            mre.WaitOne();
        } while (bytesToUpload > 0);
        Console.WriteLine("Now committing block list");
        // Committing the block list in order materializes the blob from its blocks.
        var pbl = myBlob.PutBlockListAsync(blockIds);
        pbl.ContinueWith(t =>
        {
            Console.WriteLine("Blob uploaded completely.");
        });
    }
    Console.ReadKey();
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobHelper"/> class,
/// parsing the connection string and creating the blob client from it.
/// </summary>
/// <param name="connectionString">Azure storage connection string.</param>
public AzureBlobHelper(string connectionString)
{
    var account = CloudStorageAccount.Parse(connectionString);
    this._storageAccount = account;
    this._blobClient = account.CreateCloudBlobClient();
}
/// <summary>
/// Uploads each stream to a newly created asset container together with a
/// thumbnail produced by <c>ResizeImage</c>, and returns one
/// <see cref="ImageLists"/> entry per uploaded blob.
/// </summary>
/// <param name="strmLists">Image streams to upload; may be null.</param>
/// <param name="lstContntTypes">Content type of each stream, parallel to <paramref name="strmLists"/>.</param>
/// <returns>Metadata entries for every uploaded image and thumbnail.</returns>
public async Task<List<ImageLists>> UploadImages(List<Stream> strmLists, List<string> lstContntTypes)
{
    string myContainerName = "Test007";
    string assetID = CreateBLOBContainer(myContainerName);
    assetID = assetID.Replace("nb:cid:UUID:", "asset-");
    List<ImageLists> retCollection = new List<ImageLists>();

    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(myAzureStorageConSetting);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = blobClient.GetContainerReference(assetID);
    // Blobs are publicly readable; the container listing is not.
    container.SetPermissions(
        new BlobContainerPermissions
    {
        PublicAccess = BlobContainerPublicAccessType.Blob
    });

    if (strmLists == null)
    {
        return retCollection;
    }

    for (int i = 0; i < strmLists.Count; i++)
    {
        // Map the declared content type onto a file extension (unknown types get none).
        string strExtension;
        switch (lstContntTypes[i])
        {
        case "image/gif":
            strExtension = ".gif";
            break;
        case "image/jpeg":
            strExtension = ".jpeg";
            break;
        case "image/jpg":
            strExtension = ".jpg";
            break;
        case "image/png":
            strExtension = ".png";
            break;
        default:
            strExtension = string.Empty;
            break;
        }

        string imgGUID = Guid.NewGuid().ToString();

        // Full-size image.
        CloudBlockBlob blockBlob = container.GetBlockBlobReference(string.Concat(imgGUID, strExtension));
        await blockBlob.UploadFromStreamAsync(strmLists[i]);
        ImageLists img = new ImageLists();
        img.ImageID = new Guid(imgGUID);
        img.Title = string.Concat(imgGUID, strExtension);
        img.ImageSize = strmLists[i].Length;
        img.AssetID = assetID;
        retCollection.Add(img);

        // Thumbnail derived from the same stream.
        CloudBlockBlob blockblobthumb = container.GetBlockBlobReference(string.Concat(imgGUID, "_thumb", strExtension));
        Stream strmThumb = ResizeImage(strmLists[i]);
        using (strmThumb)
        {
            await blockblobthumb.UploadFromStreamAsync(strmThumb);
            img = new ImageLists();
            img.ImageID = new Guid(imgGUID);
            img.Title = string.Concat(imgGUID, "_thumb", strExtension);
            img.ImageSize = strmThumb.Length;
            img.AssetID = assetID;
            retCollection.Add(img);
        }
    }
    return retCollection;
}
// Adapts the APM Begin/EndGetResponse pair of a WebRequest into a Task, with
// req.Abort wired up as the cancellation/cleanup action.
// NOTE(review): EndGetResponse is resolved against 'service' (CloudBlobClient) —
// presumably a project extension method that unwraps storage error responses;
// confirm against the APMTask/extension definitions.
private static Task<WebResponse> GetResponseAsync(this WebRequest req, CloudBlobClient service)
{
    return new APMTask<WebResponse>(
        req.BeginGetResponse,
        (asyncresult) => service.EndGetResponse(asyncresult, req),
        req.Abort);
}
/// <summary>
/// Uploads the posted audio files to the user's blob container and registers each
/// one as a track appended to the given playlist. A temporary local copy is
/// written only so the track name can be read from the file's metadata.
/// </summary>
/// <param name="id">Playlist id.</param>
/// <param name="files">Files posted with the request.</param>
/// <returns>Status string: success, quota/container errors, or the first exception message.</returns>
public async Task<string> FileUpload(long id, IFormFile[] files)
{
    var claimsIdentity = User.Identity as ClaimsIdentity;
    // Robustness: FirstOrDefault can return null; avoid a NullReferenceException on .Value.
    var idClaim = claimsIdentity?.Claims.FirstOrDefault(claim => claim.Type == "Id");
    if (idClaim == null)
    {
        return "User id claim missing.";
    }
    var userId = Convert.ToInt64(idClaim.Value);
    var user = _multiSourcePlaylistRepository.GetUser(userId);
    var filePath = Path.Combine("uploads", user.FileFolder);
    var uploads = Path.Combine(_environment.ContentRootPath, filePath);
    var playlist = _multiSourcePlaylistRepository.GetPlaylist(id);
    var allTracks = _multiSourcePlaylistRepository.GetAllTracks();

    // New tracks are appended after the playlist's current highest Order.
    int lastOrder = 0;
    List<Track> temp = new List<Track>();
    if (allTracks != null)
    {
        temp = allTracks.Where(y => y.Playlist.Id == id).ToList();
    }
    if (temp != null && temp.Any())
    {
        lastOrder = temp.OrderByDescending(x => x.Order).FirstOrDefault().Order + 1;
    }

    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(
        _configuration["Production:StorageConnectionString"]);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = blobClient.GetContainerReference(user.FileFolder);

    // Ensure that the share exists and has room (10 GB soft quota).
    if (await container.ExistsAsync())
    {
        int bytesToMegaBytes = 1048576;
        var totalSize = await isThereDiscSpaceInAzure(container);
        if ((totalSize) / bytesToMegaBytes > 10000)
        {
            return "NO_DISC_SPACE";
        }
    }
    else
    {
        return "User container doesn't exists.";
    }

    foreach (var file in files)
    {
        try
        {
            var filename = file.FileName;
            var fullpath = Path.Combine(uploads, filename);
            CloudBlockBlob blob = container.GetBlockBlobReference(filename);
            if (!await blob.ExistsAsync())
            {
                if (file.Length > 0)
                {
                    using (var fileStream = file.OpenReadStream())
                    {
                        await blob.UploadFromStreamAsync(fileStream);
                    }
                }
            }
            // Local copy for metadata extraction; deleted again below.
            // (using disposes the stream, which also flushes — no explicit
            // Flush/Dispose calls needed.)
            using (var fileStream = new FileStream(fullpath, FileMode.Create))
            {
                await file.CopyToAsync(fileStream);
            }
            Track fileTrack = new Track();
            fileTrack.Address = file.FileName;
            fileTrack.Playlist = playlist;
            fileTrack.Type = 5;
            fileTrack.Order = lastOrder;
            fileTrack.Name = getTrackName(fullpath); // artist/title from the mp3 metadata
            _multiSourcePlaylistRepository.PostTrack(fileTrack);
            ++lastOrder;
            System.IO.File.Delete(fullpath);
        }
        catch (Exception ex)
        {
            // NOTE(review): aborts the whole batch on the first failure and surfaces
            // the raw exception message to the caller (existing contract, kept).
            return ex.Message;
        }
    }
    return "File was Uploaded";
}
/// <summary>
/// Build process step chain based on process type configuration.
/// Reads the JSON chain definition for the process type, ensures each step's
/// assembly is present locally (downloading the "mediabutlerbin" container when
/// missing), instantiates the steps and links them via SetSuccessor.
/// </summary>
/// <param name="processTypeId">Process type whose chain configuration is loaded.</param>
/// <returns>The instantiated steps, in chain order.</returns>
private List<StepHandler> BuildChain(string processTypeId)
{
    StepHandler prevStep = null;
    List<StepHandler> auxSteps = new List<StepHandler>();
    string jsonTxt;
    try
    {
        jsonTxt = ReadConfigOrDefault(processTypeId + ".ChainConfig");
        if (string.IsNullOrEmpty(jsonTxt))
        {
            throw new Exception(processTypeId + " Not Found, check ButlerConfiguration Table");
        }
    }
    catch (Exception X)
    {
        // Wrap with context; keep X as InnerException so the original failure survives.
        throw new Exception("[Error at BuildChain] Process " + X.Message, X);
    }

    //Sensible config manually
    List<stepTypeInfo> StepList = Newtonsoft.Json.JsonConvert.DeserializeObject<List<stepTypeInfo>>(jsonTxt);
    foreach (stepTypeInfo item in StepList)
    {
        //Build the chain
        //1. is the Assembly in bin? If not, pull the whole tools container down.
        if (!File.Exists(item.AssemblyName))
        {
            try
            {
                CloudStorageAccount storageAccount = CloudStorageAccount.Parse(myProcessConfigConn);
                CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
                CloudBlobContainer container = blobClient.GetContainerReference("mediabutlerbin");
                //TODO: fix this is DLL exis and is it on use
                foreach (IListBlobItem dll in container.ListBlobs(null, false))
                {
                    // Last URI segment is the blob (file) name.
                    Uri myUri = dll.Uri;
                    int seg = myUri.Segments.Length - 1;
                    string name = myUri.Segments[seg];
                    CloudBlockBlob blockBlob = container.GetBlockBlobReference(name);
                    using (var fileStream = System.IO.File.OpenWrite(@".\" + name))
                    {
                        blockBlob.DownloadToStream(fileStream);
                    }
                }
                if (!File.Exists(item.AssemblyName))
                {
                    throw new Exception(item.AssemblyName + " don't exist");
                }
            }
            catch (Exception X)
            {
                string txt = string.Format("[{0}] Error BuildChain Assembly {1} error: {2}", this.GetType().FullName, item.AssemblyName, X.Message);
                Trace.TraceError(txt);
                // BUGFIX: 'throw X;' resets the stack trace; bare 'throw' rethrows it intact.
                throw;
            }
        }

        StepHandler obj = (StepHandler)Activator.CreateComInstanceFrom(item.AssemblyName, item.TypeName).Unwrap();
        if ((item.ConfigKey != null) && (item.ConfigKey != ""))
        {
            //LOAD STRING CONFIGURATION FOR CONFIG TABLE
            obj.StepConfiguration = this.ReadConfigOrDefault(item.ConfigKey + ".StepConfig");
        }
        auxSteps.Add(obj);
        if (prevStep != null)
        {
            prevStep.SetSuccessor(obj);
        }
        prevStep = obj;
    }
    return auxSteps;
}
/// <summary>
/// Initializes the role's storage: selects the storage account, ensures the blob
/// container exists, sets up the local cache, then mounts the instance's
/// page-blob backed virtual drive (VHD) and verifies it is usable.
/// </summary>
private void OnInitialize()
{
    var selfInstance = InstanceEnumerator.EnumerateInstances().First(i => i.IsSelf);
    cloudStorageAccount = CloudStorageAccount.Parse(RoleEnvironment.GetConfigurationSettingValue(ConfigurationSettingsKeys.StorageConnectionString));
    log.Info("Storage account selected: {0}", cloudStorageAccount.BlobEndpoint);
    cloudBlobClient = cloudStorageAccount.CreateCloudBlobClient();
    log.Info("Storage client created");
    var containerName = ConfigurationProvider.GetSetting(ConfigurationSettingsKeys.StorageContainerName, "ravendb");
    // In order to force a connection we just enumerate all available containers:
    var availableContainers = cloudBlobClient.ListContainers().ToArray();
    foreach (var container in availableContainers)
    {
        log.Info("Available container: {0}", container.Name);
    }
    if (!availableContainers.Any(c => c.Name.Equals(containerName)))
    {
        log.Info("Container {0} does not exist, creating", containerName);
        // Container does not exist:
        cloudBlobClient.GetContainerReference(containerName).Create();
    }
    cloudBlobContainer = cloudBlobClient.GetContainerReference(containerName);
    log.Info("Container {0} selected", cloudBlobContainer.Name);
    // Local scratch resource backing the CloudDrive cache.
    localCache = RoleEnvironment.GetLocalResource(ConfigurationSettingsKeys.StorageCacheResource);
    log.Info("Cache resource retrieved: {0}, path: {1}", localCache.Name, localCache.RootPath);
    CloudDrive.InitializeCache(localCache.RootPath, localCache.MaximumSizeInMegabytes);
    log.Info("Cache initialized: {0} mb", localCache.MaximumSizeInMegabytes);
    // One VHD per role instance, named after the instance type and index.
    var driveName = string.Format("{0}{1}.vhd", selfInstance.InstanceType, selfInstance.InstanceIndex).ToLowerInvariant();
    log.Info("Virtual drive name: {0}", driveName);
    var pageBlob = cloudBlobContainer.GetPageBlobReference(driveName);
    log.Info("Virtual drive blob: {0}", pageBlob.Uri);
    cloudDrive = cloudStorageAccount.CreateCloudDrive(pageBlob.Uri.ToString());
    log.Info("Virtual drive created: {0}", cloudDrive.Uri);
    var storageSize = ConfigurationProvider.GetSetting(ConfigurationSettingsKeys.StorageSize, 50000);
    log.Info("Storage size: {0} mb", storageSize);
    cloudDrive.CreateIfNotExist(storageSize);
    log.Info("Virtual drive initialized: {0}", cloudDrive.Uri);
    var mountedDirectoryPath = cloudDrive.Mount(storageSize, DriveMountOptions.None);
    log.Info("Virtual drive mounted at: {0}", mountedDirectoryPath);
    mountedDirectory = new DirectoryInfo(mountedDirectoryPath);
    log.Info("Ensuring drive is available: {0}", mountedDirectoryPath);
    // Presumably touches a test file on the mounted drive to prove it is
    // writable — TODO confirm against UpdateTestFile's implementation.
    UpdateTestFile();
    log.Info("Storage initialization succeeded");
}