public void BlobUriBuilder_LocalDockerUrl_AzuriteBlob()
{
    // Arrange: an Azurite emulator endpoint reached through a Docker host name.
    const string azuriteUri = "http://azure-storage-emulator-azurite:10000/devstoreaccount1/container/blob";
    UriBuilder sourceUri = new UriBuilder(azuriteUri);

    // Act
    BlobUriBuilder builder = new BlobUriBuilder(sourceUri.Uri);
    Uri roundTripped = builder.ToUri();

    // Assert: for host-style emulator URIs the account name is the first path segment.
    Assert.AreEqual("http", builder.Scheme);
    Assert.AreEqual("azure-storage-emulator-azurite", builder.Host);
    Assert.AreEqual("devstoreaccount1", builder.AccountName);
    Assert.AreEqual("container", builder.BlobContainerName);
    Assert.AreEqual("blob", builder.BlobName);
    Assert.AreEqual("", builder.Snapshot);
    Assert.IsNull(builder.Sas);
    Assert.AreEqual("", builder.Query);
    Assert.AreEqual(10000, builder.Port);
    Assert.AreEqual(sourceUri, roundTripped);
}
public void BlobUriBuilder_CustomUri_AccountContainerBlobTest()
{
    // Arrange: a custom-domain (CNAME) endpoint; no storage account name is derivable.
    const string customDomainUri = "https://www.mycustomname.com/containername/blobname";
    UriBuilder sourceUri = new UriBuilder(customDomainUri);

    // Act
    BlobUriBuilder builder = new BlobUriBuilder(sourceUri.Uri);
    Uri roundTripped = builder.ToUri();

    // Assert: container and blob come from the path; AccountName stays empty.
    Assert.AreEqual("https", builder.Scheme);
    Assert.AreEqual("www.mycustomname.com", builder.Host);
    Assert.AreEqual(String.Empty, builder.AccountName);
    Assert.AreEqual("containername", builder.BlobContainerName);
    Assert.AreEqual("blobname", builder.BlobName);
    Assert.AreEqual("", builder.Snapshot);
    Assert.IsNull(builder.Sas);
    Assert.AreEqual("", builder.Query);
    Assert.AreEqual(443, builder.Port);
    Assert.AreEqual(sourceUri, roundTripped);
}
public void BlobUriBuilder_IPStyleUrl_PortTest()
{
    // Arrange: IP-style URI — the account name is the first path segment,
    // and an explicit non-default port is supplied.
    var uriString = "https://127.0.0.1:8080/account/container";
    var originalUri = new UriBuilder(uriString);

    // Act
    var blobUriBuilder = new BlobUriBuilder(originalUri.Uri);
    var newUri = blobUriBuilder.ToUri();

    // Assert
    // NOTE: renamed ContainerName -> BlobContainerName and UnparsedParams -> Query,
    // the current property names used by every sibling test in this file; the old
    // names are the obsolete pre-GA spellings of the same properties.
    Assert.AreEqual("https", blobUriBuilder.Scheme);
    Assert.AreEqual("127.0.0.1", blobUriBuilder.Host);
    Assert.AreEqual("account", blobUriBuilder.AccountName);
    Assert.AreEqual("container", blobUriBuilder.BlobContainerName);
    Assert.AreEqual("", blobUriBuilder.BlobName);
    Assert.AreEqual("", blobUriBuilder.Snapshot);
    Assert.IsNull(blobUriBuilder.Sas);
    Assert.AreEqual("", blobUriBuilder.Query);
    Assert.AreEqual(8080, blobUriBuilder.Port);
    Assert.AreEqual(originalUri, newUri);
}
/// <summary>
/// Streams back the content of the blob identified by the supplied path,
/// or 404 when the blob does not exist.
/// </summary>
public async Task<IActionResult> GetFileContentAsync(
    [FromBody, SwaggerRequestBody("Path to the file", Required = true)] FileContentParams parms)
{
    _logger.LogInformation("Getting content for file path: {path}", parms.Path);

    string storageConnectionString = _appSettings.MediaStorageConnectionString;
    string containerName = _appSettings.MediaStorageContainer;

    // Authenticate with storage and download the blob named in the supplied URL.
    BlobUriBuilder parsedUri = new BlobUriBuilder(new Uri(parms.Path));
    BlobClient blobClient = new BlobClient(storageConnectionString, containerName, parsedUri.BlobName);

    try
    {
        BlobDownloadInfo download = await blobClient.DownloadAsync();
        return File(download.Content, download.ContentType);
    }
    catch (RequestFailedException e) when (e.ErrorCode == BlobErrorCode.BlobNotFound)
    {
        return NotFound();
    }
}
public void BlobUriBuilder_IPStyleUrl_VersionIdTest()
{
    // Arrange: IP-style URI carrying a versionid query parameter.
    const string versionedUri = "https://127.0.0.1/account/container/blob?versionid=2011-03-09T01:42:34.9360000Z";
    UriBuilder sourceUri = new UriBuilder(versionedUri);

    // Act
    BlobUriBuilder builder = new BlobUriBuilder(sourceUri.Uri);
    Uri roundTripped = builder.ToUri();

    // Assert: versionid is parsed out of the query into VersionId, leaving Query empty.
    Assert.AreEqual("https", builder.Scheme);
    Assert.AreEqual("127.0.0.1", builder.Host);
    Assert.AreEqual("account", builder.AccountName);
    Assert.AreEqual("container", builder.BlobContainerName);
    Assert.AreEqual("blob", builder.BlobName);
    Assert.AreEqual("2011-03-09T01:42:34.9360000Z", builder.VersionId);
    Assert.IsNull(builder.Sas);
    Assert.AreEqual("", builder.Query);
    Assert.AreEqual(443, builder.Port);
    Assert.AreEqual(sourceUri, roundTripped);
}
public void BlobUriBuilder_IPStyleUrl_AccountTest()
{
    // Arrange: account-level IP-style URI with a non-SAS query string.
    const string accountUri = "https://127.0.0.1/account?comp=list";
    UriBuilder sourceUri = new UriBuilder(accountUri);

    // Act
    BlobUriBuilder builder = new BlobUriBuilder(sourceUri.Uri);
    Uri roundTripped = builder.ToUri();

    // Assert: no container/blob segments; the unrecognized query survives in Query.
    Assert.AreEqual("https", builder.Scheme);
    Assert.AreEqual("127.0.0.1", builder.Host);
    Assert.AreEqual("account", builder.AccountName);
    Assert.AreEqual("", builder.BlobContainerName);
    Assert.AreEqual("", builder.BlobName);
    Assert.AreEqual("", builder.Snapshot);
    Assert.IsNull(builder.Sas);
    Assert.AreEqual("comp=list", builder.Query);
    Assert.AreEqual(443, builder.Port);
    Assert.AreEqual(sourceUri, roundTripped);
}
// </Snippet_UseAccountSAS>

#region GetUserDelegationSasBlob
// <Snippet_GetUserDelegationSasBlob>
// Builds a blob URI carrying a user-delegation SAS (AAD-backed, no account key)
// that grants read+write on the given blob for 7 days.
async static Task<Uri> GetUserDelegationSasBlob(BlobClient blobClient)
{
    // Walk up from the blob to its service client to request the delegation key.
    BlobServiceClient blobServiceClient = blobClient.GetParentBlobContainerClient().GetParentBlobServiceClient();

    // Get a user delegation key for the Blob service that's valid for 7 days.
    // You can use the key to generate any number of shared access signatures
    // over the lifetime of the key.
    UserDelegationKey userDelegationKey = await blobServiceClient.GetUserDelegationKeyAsync(DateTimeOffset.UtcNow, DateTimeOffset.UtcNow.AddDays(7));

    // Create a SAS token that's valid for 7 days.
    // Resource "b" scopes the SAS to a single blob.
    BlobSasBuilder sasBuilder = new BlobSasBuilder()
    {
        BlobContainerName = blobClient.BlobContainerName,
        BlobName = blobClient.Name,
        Resource = "b",
        ExpiresOn = DateTimeOffset.UtcNow.AddDays(7)
    };

    // Specify read and write permissions for the SAS.
    sasBuilder.SetPermissions(BlobSasPermissions.Read | BlobSasPermissions.Write);

    // Add the SAS token to the blob URI.
    BlobUriBuilder blobUriBuilder = new BlobUriBuilder(blobClient.Uri)
    {
        // Specify the user delegation key.
        Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, blobServiceClient.AccountName)
    };

    Console.WriteLine("Blob user delegation SAS URI: {0}", blobUriBuilder);
    Console.WriteLine();
    return (blobUriBuilder.ToUri());
}
/// <summary>
/// Azure storage blob constructor
/// </summary>
/// <param name="blob">ICloud blob object</param>
/// <param name="storageContext">Storage context holding account and credential information.</param>
/// <param name="continuationToken">Optional listing continuation marker; when set, a Track1 ICloudBlob reference is also created.</param>
/// <param name="options">Optional Track2 blob client options.</param>
/// <param name="getProperties">When true, blob properties are fetched eagerly via SetProperties; otherwise only name/context are set.</param>
public AzureStorageBlob(TaggedBlobItem blob, AzureStorageContext storageContext, string continuationToken = null, BlobClientOptions options = null, bool getProperties = false)
{
    // Get Track2 blob client
    BlobUriBuilder uriBuilder = new BlobUriBuilder(storageContext.StorageAccount.BlobEndpoint)
    {
        BlobContainerName = blob.BlobContainerName,
        BlobName = blob.BlobName
    };
    Uri blobUri = uriBuilder.ToUri();
    if (storageContext.StorageAccount.Credentials.IsSAS)
    {
        // Raw string concatenation of the SAS onto the URI.
        // NOTE(review): assumes SASToken carries its own leading '?' — confirm against the credential implementation.
        blobUri = new Uri(blobUri.ToString() + storageContext.StorageAccount.Credentials.SASToken);
    }
    this.privateBlobBaseClient = Util.GetTrack2BlobClient(blobUri, storageContext, options);

    // Set continuationToken
    if (continuationToken != null)
    {
        BlobContinuationToken token = new BlobContinuationToken();
        token.NextMarker = continuationToken;
        this.ContinuationToken = token;
        // Also materialize the legacy (Track1) blob reference for callers that need it.
        ICloudBlob = storageContext.StorageAccount.CreateCloudBlobClient().GetContainerReference(blob.BlobContainerName).GetBlobReference(blob.BlobName);
    }

    // Set other properties
    if (!getProperties)
    {
        // Lazy mode: defer property retrieval; only record the name and context.
        getLazyProperties = false;
        Name = blob.BlobName;
        this.Context = storageContext;
    }
    else
    {
        SetProperties(this.privateBlobBaseClient, storageContext, null, options);
    }
}
// Verifies that a user-delegation SAS carrying PreauthorizedAgentObjectId
// authorizes blob upload/exists calls on the container.
public async Task BlobSasBuilder_PreauthorizedAgentObjectId()
{
    // Arrange
    BlobServiceClient oauthService = GetServiceClient_OauthAccount();
    string containerName = GetNewContainerName();
    string preauthorizedAgentGuid = Recording.Random.NewGuid().ToString();
    await using DisposingContainer test = await GetTestContainerAsync(service: oauthService, containerName: containerName);

    // Arrange
    Response<UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    // FIX: locals renamed to camelCase — the originals ("BlobSasBuilder", "BlobUriBuilder")
    // shadowed their own type names, which is confusing and inconsistent with the
    // sibling BlobSasBuilder_CorrelationId test.
    BlobSasBuilder blobSasBuilder = new BlobSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        BlobContainerName = containerName,
        PreauthorizedAgentObjectId = preauthorizedAgentGuid
    };
    blobSasBuilder.SetPermissions(BlobSasPermissions.All);

    BlobUriBuilder blobUriBuilder = new BlobUriBuilder(test.Container.Uri)
    {
        Sas = blobSasBuilder.ToSasQueryParameters(userDelegationKey, test.Container.AccountName)
    };

    BlobContainerClient containerClient = InstrumentClient(new BlobContainerClient(blobUriBuilder.ToUri(), GetOptions()));

    // Act: both calls must succeed under the SAS.
    BlobClient blobClient = containerClient.GetBlobClient(GetNewBlobName());
    await blobClient.UploadAsync(new MemoryStream());
    await blobClient.ExistsAsync();
}
public async Task ContainerSas_AllPermissions()
{
    // Arrange: container-scoped SAS with all permissions, signed with the shared key.
    await using DisposingContainer test = await GetTestContainerAsync();
    string blobName = GetNewBlobName();

    BlobSasBuilder sasBuilder = new BlobSasBuilder(
        permissions: BlobContainerSasPermissions.All,
        expiresOn: Recording.UtcNow.AddDays(1))
    {
        BlobContainerName = test.Container.Name,
    };

    BlobUriBuilder uriBuilder = new BlobUriBuilder(test.Container.Uri)
    {
        BlobName = blobName,
        Sas = sasBuilder.ToSasQueryParameters(GetNewSharedKeyCredentials())
    };

    // Act: creating an append blob through the SAS URI must succeed.
    AppendBlobClient appendBlobClient = InstrumentClient(new AppendBlobClient(uriBuilder.ToUri(), GetOptions()));
    await appendBlobClient.CreateAsync();
}
public void BlobUriBuilder_LocalDockerUrl_PortTestBlob()
{
    // Arrange
    // BlobEndpoint from https://docs.microsoft.com/en-us/azure/storage/common/storage-use-emulator#connect-to-the-emulator-account-using-the-well-known-account-name-and-key
    const string emulatorUri = "http://docker_container:10000/devstoreaccount1/containername/blobname";
    UriBuilder sourceUri = new UriBuilder(emulatorUri);

    // Act
    BlobUriBuilder builder = new BlobUriBuilder(sourceUri.Uri);
    Uri roundTripped = builder.ToUri();

    // Assert: emulator-style URI parses account/container/blob from the path
    // and preserves the custom port.
    Assert.AreEqual("http", builder.Scheme);
    Assert.AreEqual("docker_container", builder.Host);
    Assert.AreEqual("devstoreaccount1", builder.AccountName);
    Assert.AreEqual("containername", builder.BlobContainerName);
    Assert.AreEqual("blobname", builder.BlobName);
    Assert.AreEqual("", builder.Snapshot);
    Assert.IsNull(builder.Sas);
    Assert.AreEqual("", builder.Query);
    Assert.AreEqual(10000, builder.Port);
    Assert.AreEqual(sourceUri, roundTripped);
}
public async Task BlobSasBuilder_CorrelationId()
{
    // Arrange: OAuth-backed service client and a disposable test container.
    BlobServiceClient oauthService = GetServiceClient_OauthAccount();
    string containerName = GetNewContainerName();
    await using DisposingContainer test = await GetTestContainerAsync(service: oauthService, containerName: containerName);

    // Arrange: user delegation key, then a SAS stamped with a CorrelationId.
    Response<UserDelegationKey> userDelegationKey = await oauthService.GetUserDelegationKeyAsync(
        startsOn: null,
        expiresOn: Recording.UtcNow.AddHours(1));

    BlobSasBuilder sasBuilder = new BlobSasBuilder
    {
        StartsOn = Recording.UtcNow.AddHours(-1),
        ExpiresOn = Recording.UtcNow.AddHours(1),
        BlobContainerName = containerName,
        CorrelationId = Recording.Random.NewGuid().ToString()
    };
    sasBuilder.SetPermissions(BlobSasPermissions.All);

    BlobUriBuilder uriBuilder = new BlobUriBuilder(test.Container.Uri)
    {
        Sas = sasBuilder.ToSasQueryParameters(userDelegationKey, test.Container.AccountName)
    };

    BlobContainerClient containerClient = InstrumentClient(new BlobContainerClient(uriBuilder.ToUri(), GetOptions()));

    // Act
    await foreach (BlobItem pathItem in containerClient.GetBlobsAsync())
    {
        // Just make sure the call succeeds.
    }
}
/// <inheritdoc/>
/// <remarks>
/// Uploads the local file under its own file name beneath <paramref name="destinationUri"/>,
/// overwriting any existing blob of the same name.
/// </remarks>
public async Task<Response<BlobContentInfo>> BlobUploadAsync(string filePath, Uri destinationUri)
{
    _ = filePath ?? throw new ArgumentNullException(nameof(filePath));
    _ = destinationUri ?? throw new ArgumentNullException(nameof(destinationUri));

    // Target blob name = the local file's name.
    BlobUriBuilder uriBuilder = new BlobUriBuilder(destinationUri)
    {
        BlobName = Path.GetFileName(filePath),
    };
    var destinationBlobBaseClient = new BlobClient(uriBuilder.ToUri(), _tokenCredential);

    Response<BlobContentInfo> blobContentInfo;
    try
    {
        _log.LogInformation($"Uploading to Blob storage as blob:\n\t {filePath}\n");
        // overwrite: true — last writer wins.
        blobContentInfo = await destinationBlobBaseClient.UploadAsync(filePath, true).ConfigureAwait(false);
    }
    catch (RequestFailedException e) when (e.ErrorCode == "AuthorizationPermissionMismatch")
    {
        // FIX: message previously named BlobBaseClient.StartCopyFromUriAsync, but this
        // method performs an upload via BlobClient.UploadAsync.
        var msg = $"BlobClient.UploadAsync requires the identity principal to have role 'Storage Blob Data Contributor' on resource (file, container, resource-group, or subscription). " +
            $"Could not upload blob from {filePath} to {destinationUri}.";
        _log.LogError(e, msg);
        throw new Exception(msg, e);
    }
    catch (Exception e)
    {
        var msg = $"Could not upload blob from {filePath} to {destinationUri}.";
        _log.LogError(e, msg);
        throw new Exception(msg, e);
    }

    return blobContentInfo;
}
public void BlobUriBuilder_RegularUrl_CNAME()
{
    // A bare custom-domain host yields no storage account name.
    var builder = new BlobUriBuilder(new Uri("http://www.contoso.com"));

    Assert.AreEqual(string.Empty, builder.AccountName);
}
/// <summary>
/// Splits a blob URL into its (container name, blob name) pair.
/// </summary>
public (string, string) GetContainerAndNameFromUri(string blobUrl)
{
    var parsed = new BlobUriBuilder(new Uri(blobUrl));
    return (parsed.BlobContainerName, parsed.BlobName);
}
/// <inheritdoc/>
/// <remarks>
/// Creates a new (empty) AMS v2 asset named after the first source file's
/// account/container, then server-side copies each source blob into the asset's
/// container (flattening any subfolder structure), and finally registers the
/// copied files with the asset via CreateFileInfosAsync.
/// </remarks>
public async Task<string> CopyFilesIntoNewAsset(IEnumerable<Uri> filesToCopy)
{
    _ = filesToCopy ?? throw new ArgumentNullException(nameof(filesToCopy));
    _ = !filesToCopy.Any() ? throw new ArgumentOutOfRangeException(nameof(filesToCopy), "Count is zero") : 0;

    string newAssetId;
    // Asset name/account are derived from the FIRST file; all files are assumed
    // to live in the same account/container.
    var assetUriBuilder = new BlobUriBuilder(filesToCopy.First());
    string assetName = GetInputAssetName(assetUriBuilder);
    string assetAccountName = assetUriBuilder.AccountName;
    Uri assetUri;
    try
    {
        (newAssetId, assetUri) = await _mediaServicesV2RestWrapper.CreateEmptyAssetAsync(assetName, assetAccountName).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        _log.LogExceptionObject(LogEventIds.MediaServicesV2InputAssetError, e, assetUriBuilder.ToUri());
        throw new GridwichEncodeCreateJobException($"Failed to create asset for {assetUriBuilder.ToUri()}", null, e, LogEventIds.MediaServicesV2InputAssetError);
    }
    _log.LogEventObject(LogEventIds.MediaServicesV2AssetCreated, new { newAssetId, assetName });

    try
    {
        // Create a new muted context for these copy operations
        var internalCorrelator = new JObject();
        // Correlate on the asset id when available; otherwise a fresh GUID tagged "G:".
        string id = (!string.IsNullOrWhiteSpace(newAssetId)) ? newAssetId : $"G:{Guid.NewGuid()}";
        internalCorrelator.Add("~AMS-V2-Encode", id);
        var context = new StorageClientProviderContext(internalCorrelator, muted: true);

        foreach (var fileToCopy in filesToCopy)
        {
            var sourceUriBuilder = new BlobUriBuilder(fileToCopy);
            var destUriBuilder = new BlobUriBuilder(assetUri)
            {
                // we need to remove subfolders if any, as AMS v2 does not support subfolder(s) in an asset container
                BlobName = sourceUriBuilder.BlobName.Split('/').Last(),
            };

            // Fail fast if the source blob is missing rather than starting a doomed copy.
            var exists = await _storageService.GetBlobExistsAsync(fileToCopy, context).ConfigureAwait(false);
            if (!exists)
            {
                _log.LogEventObject(LogEventIds.MediaServicesV2AttemptToUseNonexistentBlobAsInput, fileToCopy);
                throw new GridwichMediaServicesV2Exception($"Attempted to use nonexistent blob: {fileToCopy} as input to encoding.", LogEventIds.MediaServicesV2AttemptToUseNonexistentBlobAsInput, context.ClientRequestIdAsJObject);
            }

            // Time each copy and wait for server-side completion before moving on.
            var s = new Stopwatch();
            s.Start();
            var copyFromUriOperation = await _storageService.BlobCopy(fileToCopy, destUriBuilder.ToUri(), context).ConfigureAwait(false);
            var response = await copyFromUriOperation.WaitForCompletionAsync().ConfigureAwait(false);
            s.Stop();
            _log.LogEventObject(LogEventIds.MediaServicesV2CopyFileCompleted, new { CopyElapsedMilliseconds = s.ElapsedMilliseconds.ToString("G", CultureInfo.InvariantCulture) });
        }

        await _mediaServicesV2RestWrapper.CreateFileInfosAsync(newAssetId).ConfigureAwait(false);
        _log.LogEventObject(LogEventIds.MediaServicesV2CopyFileAndUpdateAssetSuccess, new { assetName, assetUri });
    }
    catch (Exception e) when (!(e is GridwichMediaServicesV2Exception))
    {
        // Wrap everything except our own domain exception, which propagates as-is.
        _log.LogExceptionObject(LogEventIds.MediaServicesV2CopyFileAndUpdateAssetError, e, filesToCopy);
        throw new GridwichEncodeCreateJobException($"Failed to copy {assetName} to {newAssetId}", null, e, LogEventIds.MediaServicesV2CopyFileAndUpdateAssetError);
    }
    return (newAssetId);
}
/// <summary>
/// Builds the AMS v2 output asset name from the storage account and
/// (lowercase-only) output container name.
/// </summary>
/// <param name="outputAssetStorageAccountName">Storage account hosting the output container.</param>
/// <param name="outputContainerUriBuilder">Parsed output container URI.</param>
/// <returns>Asset name of the form "V2-{account}-{container}-Output".</returns>
/// <exception cref="ArgumentNullException">When the builder is null.</exception>
/// <exception cref="ArgumentException">When the container name contains uppercase characters.</exception>
private static string GetOutputAssetName(string outputAssetStorageAccountName, BlobUriBuilder outputContainerUriBuilder)
{
    // FIX: guard against null (previously a NullReferenceException), consistent
    // with GetInputAssetName; also read the property once.
    _ = outputContainerUriBuilder ?? throw new ArgumentNullException(nameof(outputContainerUriBuilder));

    string containerName = outputContainerUriBuilder.BlobContainerName;
    if (containerName != containerName.ToLower(CultureInfo.InvariantCulture))
    {
        throw new ArgumentException($"ContainerName {containerName} must be lowercase.");
    }

    return $"V2-{outputAssetStorageAccountName}-{containerName}-Output";
}
/// <summary>
/// Builds the AMS v2 input asset name from the source blob's account and container.
/// </summary>
/// <param name="sourceUriBuilder">Parsed source blob URI.</param>
/// <returns>Asset name of the form "V2-{account}-{container}-Input".</returns>
/// <exception cref="ArgumentNullException">When the builder is null.</exception>
private static string GetInputAssetName(BlobUriBuilder sourceUriBuilder)
{
    // FIX: guard against null (previously a NullReferenceException), matching the
    // documented public variant of this helper.
    _ = sourceUriBuilder ?? throw new ArgumentNullException(nameof(sourceUriBuilder));
    return $"V2-{sourceUriBuilder.AccountName}-{sourceUriBuilder.BlobContainerName}-Input";
}
/// <inheritdoc/>
/// <remarks>
/// Submits a Media Encoder Standard job to AMS v2 in five guarded stages:
/// (1) derive asset/job names from the output container, (2) resolve the latest
/// MES processor, (3) serialize + Base64Url-encode the correlation data,
/// (4) get/create the callback notification endpoint, (5) create the job.
/// Each stage logs and rethrows as GridwichEncodeCreateJobException on failure.
/// </remarks>
public async Task<string> SubmitMesJobAsync(string inputAssetId, string preset, Uri outputContainer, Uri callbackEndpoint, IDictionary<string, string> correlationData)
{
    // Argument validation — fail before touching any external service.
    if (string.IsNullOrWhiteSpace(inputAssetId))
    {
        throw new ArgumentException($@"{nameof(inputAssetId)} is invalid", nameof(inputAssetId));
    }

    if (string.IsNullOrWhiteSpace(preset))
    {
        throw new ArgumentException($@"{nameof(preset)} is invalid", nameof(preset));
    }

    _ = outputContainer ?? throw new ArgumentNullException(nameof(outputContainer));
    _ = callbackEndpoint ?? throw new ArgumentNullException(nameof(callbackEndpoint));
    _ = correlationData ?? throw new ArgumentNullException(nameof(correlationData));

    // Stage 1: names derived from the output container URI.
    string outputAssetName;
    string outputAssetStorageAccountName;
    string jobName;
    try
    {
        var outputContainerUriBuilder = new BlobUriBuilder(outputContainer);
        outputAssetStorageAccountName = outputContainerUriBuilder.AccountName;
        outputAssetName = GetOutputAssetName(outputAssetStorageAccountName, outputContainerUriBuilder);
        jobName = GenerateJobName(outputAssetName);
    }
    catch (Exception e)
    {
        _log.LogExceptionObject(LogEventIds.MediaServicesV2FailedToParseOutputContainer, e, outputContainer);
        throw new GridwichEncodeCreateJobException($"Could not define output asset name or job name from {outputContainer}.", null, e, LogEventIds.MediaServicesV2FailedToParseOutputContainer);
    }

    // Stage 2: resolve the current Media Encoder Standard processor id.
    string processorId;
    try
    {
        processorId = await _mediaServicesV2RestWrapper.GetLatestMediaProcessorAsync("Media Encoder Standard").ConfigureAwait(false);
    }
    catch (Exception e)
    {
        _log.LogExceptionObject(LogEventIds.MediaServicesV2FailedToGetProcessor, e, outputContainer);
        throw new GridwichEncodeCreateJobException($"Could not get media processor.", null, e, LogEventIds.MediaServicesV2FailedToGetProcessor);
    }

    // Stage 3: encode correlation data; rejected if the encoded form exceeds 4000 chars.
    string base64UrlEncodedCorrelationDataJsonString;
    try
    {
        var correlationDataJsonString = JsonConvert.SerializeObject(correlationData);
        base64UrlEncodedCorrelationDataJsonString = Base64UrlEncoder.Encode(correlationDataJsonString);
        if (base64UrlEncodedCorrelationDataJsonString.Length > 4000)
        {
            const string ErrorMsg = "UrlEncoded and serialized correlationData is larger than 4000";
            _log.LogEvent(LogEventIds.MediaServicesV2CorrelationDataError, ErrorMsg, correlationData);
            // NOTE: this ArgumentException is caught just below and rewrapped.
            throw new ArgumentException(ErrorMsg, nameof(correlationData));
        }
    }
    catch (Exception e)
    {
        _log.LogExceptionObject(LogEventIds.MediaServicesV2CorrelationDataError, e, correlationData);
        throw new GridwichEncodeCreateJobException($"Could not convert correlationData.", null, e, LogEventIds.MediaServicesV2CorrelationDataError);
    }

    // Stage 4: callback endpoint for job-state notifications.
    string notificationEndPointId;
    try
    {
        notificationEndPointId = await _mediaServicesV2RestWrapper.GetOrCreateNotificationEndPointAsync("AmsV2Callback", callbackEndpoint).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        _log.LogExceptionObject(LogEventIds.MediaServicesV2SpecificDataError, e, callbackEndpoint);
        throw new GridwichEncodeCreateJobException($"Could not create notification endpoint for {callbackEndpoint}", null, e, LogEventIds.MediaServicesV2SpecificDataError);
    }

    // Stage 5: submit the job itself.
    string jobId;
    try
    {
        jobId = await _mediaServicesV2RestWrapper.CreateJobAsync(
            jobName,
            processorId,
            inputAssetId,
            preset,
            outputAssetName,
            outputAssetStorageAccountName,
            correlationData: base64UrlEncodedCorrelationDataJsonString,
            notificationEndPointId).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        _log.LogExceptionObject(LogEventIds.MediaServicesV2SubmitMesJobFailure, e, new { jobName, processorId, inputAssetId, preset, outputAssetName, outputAssetStorageAccountName, base64UrlEncodedCorrelationDataJsonString, notificationEndPointId });
        throw new GridwichEncodeCreateJobException($"Could not start media encoder standard job.", null, e, LogEventIds.MediaServicesV2SubmitMesJobFailure);
    }
    return (jobId);
}
// Azure Function: on a BlobCreated Event Grid event, downloads the new blob,
// extracts TOA5 metadata from its content, and moves the blob from the
// transient container into the data lake's raw zone. An EtlEvent record is
// always written in the finally block, success or failure.
public static async Task Run(
    [EventGridTrigger] EventGridEvent eventGridEvent,
    ILogger log)
{
    log.LogInformation(eventGridEvent.Subject);
    string eventCallerName = "CafMeteorologyECTowerFcns.DataLakeTransientToRaw()";
    string eventCallerVersion = "v0.1.7";
    log.LogInformation(eventCallerVersion);
    EtlEventService etlEventService = new EtlEventService(eventCallerName, eventCallerVersion, "AzureFunction");
    etlEventService.LogInformation(eventCallerName, eventCallerVersion, eventGridEvent.ToString());
    etlEventService.AddInput($"EventGridEvent.Subject: {eventGridEvent.Subject}");
    etlEventService.LogInformation(eventCallerName, eventCallerVersion, $"EventGridEvent.Data: {eventGridEvent.Data}");

    // Authenticate the Function for access to blob containers
    string objectId = Environment.GetEnvironmentVariable("FUNCTION_OBJECT_ID");
    ManagedIdentityCredential credential = new ManagedIdentityCredential(objectId);

    // Read parameters
    // NOTE(review): no null checks on these environment variables — a missing
    // setting surfaces later as a less obvious failure; confirm deployment config.
    string OUTPUT_CONTAINER = Environment.GetEnvironmentVariable("OUTPUT_CONTAINER");
    string PROJECT_ID = Environment.GetEnvironmentVariable("PROJECT_ID");
    string DATALAKE_ENDPOINT = Environment.GetEnvironmentVariable("DATALAKE_ENDPOINT");

    // Only BlobCreated events are processed; anything else is an error.
    if (eventGridEvent.EventType != "Microsoft.Storage.BlobCreated")
    {
        string msg = "EventType not BlobCreated, aborting";
        etlEventService.LogError(eventCallerName, eventCallerVersion, msg);
        throw new Exception(msg);
    }

    // Get info from the event
    log.LogInformation("Parsing Event");
    JsonDocument json = JsonDocument.Parse(eventGridEvent.Data.ToString());
    string apiCall = json.RootElement.GetProperty("api").GetString();
    log.LogInformation($"api: {apiCall}");
    // Skip events for blobs that are not yet fully committed.
    // NOTE(review): '|' is the non-short-circuit OR; harmless here since the
    // operands are simple comparisons, but '||' would be conventional.
    if (!(apiCall == "FlushWithClose" | apiCall == "PutBlob" | apiCall == "PutBlockList" | apiCall == "CopyBlob"))
    {
        string msg = "EventGridEvent api not completely committed, aborting";
        log.LogInformation(msg);
        return;
    }

    try
    {
        string inputBlobUri = json.RootElement
            .GetProperty("url")
            .GetString();
        BlobUriBuilder inputBlobUriBuilder = new BlobUriBuilder(new Uri(inputBlobUri));

        // Get input blob contents
        log.LogInformation("Creating blob container client");
        var blobContainerClient = new BlobContainerClient(
            new Uri($"https://{inputBlobUriBuilder.Host}/{inputBlobUriBuilder.BlobContainerName}"),
            credential);
        var inputBlobClient = blobContainerClient.GetBlobClient(inputBlobUriBuilder.BlobName);
        if (!inputBlobClient.Exists())
        {
            log.LogInformation("Blob does not exist, exiting");
            return;
        }

        log.LogInformation("Found blob, downloading content");
        BlobDownloadInfo download = await inputBlobClient.DownloadAsync();
        string blobContent;
        using (StreamReader reader = new StreamReader(download.Content))
            blobContent = await reader.ReadToEndAsync();
        string blobName = Path.GetFileName(new Uri(inputBlobUri).AbsolutePath);
        log.LogInformation($"Blob length: {blobContent.Length}");
        if (blobContent.Length <= 0)
        {
            log.LogInformation("Blob is empty, exiting");
            return;
        }

        // Get metadata from input blob
        // NOTE(review): -8 looks like a UTC offset passed to the extractor — confirm its meaning.
        log.LogInformation("Parsing Blob into TOA5 metadata");
        TOA5Extractor extractor = new TOA5Extractor(
            blobName,
            blobContent,
            -8);
        Metadata blobMetadata = extractor.GetMetadata();
        string outputBlobDataset = GetOutputDatasetName(blobMetadata);
        string outputBlobDirPath = GetOutputSubDirPath(blobName);

        // Move blob
        log.LogInformation("Moving blob");
        AzureDataLakeService dataLakeService = new AzureDataLakeService(
            DATALAKE_ENDPOINT,
            credential);
        string outputBlobPath = $"{PROJECT_ID}/{outputBlobDataset}/{outputBlobDirPath}/{blobName}";
        log.LogInformation(outputBlobPath);
        string outputUri = await dataLakeService.MoveBlob(
            inputBlobUriBuilder.BlobContainerName,
            inputBlobUriBuilder.BlobName,
            OUTPUT_CONTAINER,
            outputBlobPath,
            etlEventService);
        log.LogInformation("Blob moved");
        etlEventService.AddOutput(outputUri);
    }
    catch (XmlException xmlException)
    {
        // XML parsing problems are logged but deliberately not rethrown.
        log.LogError(xmlException.Message);
    }
    catch (Exception e)
    {
        etlEventService.LogError(
            eventCallerName,
            eventCallerVersion,
            $"Exception occured: {e}");
        throw new Exception("Error in function", e);
    }
    finally
    {
        // Write EtlEvent — always recorded, even on failure.
        EtlEventServiceConfig etlEventServiceConfig = new EtlEventServiceConfig()
        {
            Zone = OUTPUT_CONTAINER,
            Project = PROJECT_ID,
            Endpoint = new Uri(DATALAKE_ENDPOINT),
            Credential = credential
        };
        log.LogInformation("Writing EtlEvent");
        string etlEventUri = await etlEventService.WriteAsync(etlEventServiceConfig);
        log.LogInformation($"Wrote EtlEvent to: {etlEventUri}");
    }
}
// Demonstrates a SAS that references a stored access policy by Identifier
// rather than embedding permissions/expiry in the token itself: the policy is
// set on the container, the SAS only names it, and a successful download
// proves the SAS authorized the request.
public async Task SasBuilderIdentifier()
{
    // NOTE(review): accountName and accountKey are assigned but never used below
    // (the credential reads the properties directly) — candidates for removal.
    string accountName = StorageAccountName;
    string accountKey = StorageAccountKey;
    string containerName = Randomize("sample-container");
    string blobName = Randomize("sample-blob");
    StorageSharedKeyCredential sharedKeyCredential = new StorageSharedKeyCredential(StorageAccountName, StorageAccountKey);

    // setup blob
    var container = new BlobContainerClient(ConnectionString, containerName);

    try
    {
        await container.CreateAsync();
        await container.GetBlobClient(blobName).UploadAsync(new MemoryStream(Encoding.UTF8.GetBytes("hello world")));

        // Create one or more stored access policies.
        List<BlobSignedIdentifier> signedIdentifiers = new List<BlobSignedIdentifier>
        {
            new BlobSignedIdentifier
            {
                Id = "mysignedidentifier",
                AccessPolicy = new BlobAccessPolicy
                {
                    StartsOn = DateTimeOffset.UtcNow.AddHours(-1),
                    ExpiresOn = DateTimeOffset.UtcNow.AddDays(1),
                    Permissions = "rw"
                }
            }
        };
        // Set the container's access policy.
        await container.SetAccessPolicyAsync(permissions: signedIdentifiers);

        #region Snippet:SampleSnippetsBlobMigration_SasBuilderIdentifier
        // Create BlobSasBuilder and specify parameters
        BlobSasBuilder sasBuilder = new BlobSasBuilder()
        {
            BlobContainerName = containerName,
            BlobName = blobName,
            // no permissions or expiry — they come from the stored policy named here
            Identifier = "mysignedidentifier"
        };
        #endregion

        // Create full, self-authenticating URI to the resource
        BlobUriBuilder uriBuilder = new BlobUriBuilder(StorageAccountBlobUri)
        {
            BlobContainerName = containerName,
            BlobName = blobName,
            Sas = sasBuilder.ToSasQueryParameters(sharedKeyCredential)
        };
        Uri sasUri = uriBuilder.ToUri();

        // successful download indicates pass
        await new BlobClient(sasUri).DownloadToAsync(new MemoryStream());
    }
    finally
    {
        await container.DeleteIfExistsAsync();
    }
}
/// <summary>
/// This is where the work of the sourcerer happens: enumerates every blob in
/// every container of the source account and enqueues one Replicatable message
/// per blob (source and destination URIs pre-signed with SAS) for AzReplicate
/// workers to copy. Destinations are chosen round-robin across the configured
/// destination accounts.
/// </summary>
/// <param name="cancellationToken">Token observed by the container/blob listing and queue send calls.</param>
/// <returns>A task that completes when all containers have been enumerated and enqueued.</returns>
private async Task RunAsync(CancellationToken cancellationToken)
{
    int messageCount = 0;

    // Get a reference to our source blob client and SAS for AzReplicate to use to access it
    var sourceClient = _blobServiceConnections.SourceClient;
    var sourceSas = _blobServiceConnections.SourceSas;

    // Get a reference to our destination blob client(s) and SAS(s) for AzReplicate to use to access it
    var destinationClients = _blobServiceConnections.DestinationClients;
    var destinationSas = _blobServiceConnections.DestinationSas;

    //Ensure the queue exists
    _queueClient.CreateIfNotExists(cancellationToken: cancellationToken);

    //iterate over each of the containers in the source account
    await foreach (var sourceContainer in sourceClient.GetBlobContainersAsync(BlobContainerTraits.Metadata, cancellationToken: cancellationToken))
    {
        // get a container client to talk to the container
        var sourceContainerClient = sourceClient.GetBlobContainerClient(sourceContainer.Name);

        // iterate over each of the blobs in the source container
        await foreach (var sourceBlob in sourceContainerClient.GetBlobsAsync(BlobTraits.Metadata, cancellationToken: cancellationToken))
        {
            // URI pointing to the source file with SAS
            var sourceUri = new BlobUriBuilder(sourceClient.Uri);
            sourceUri.BlobContainerName = sourceContainer.Name;
            sourceUri.BlobName = sourceBlob.Name;
            sourceUri.Query = sourceSas.ToString();

            // URI pointing to the dest file with SAS
            // If configured with more than one dest account, files will be round robin distributed accross all distination accounts
            var destinationClientKey = destinationClients.ElementAt(messageCount % destinationClients.Count()).Key;
            var destUri = new BlobUriBuilder(destinationClients[destinationClientKey].Uri);
            destUri.BlobContainerName = sourceContainer.Name;
            destUri.BlobName = sourceBlob.Name;
            destUri.Query = destinationSas[destinationClientKey].ToString();

            //create the message to put in the queue
            //we use the replicatable class to ensure that the message is in the format that
            //AzReplicate is expecting
            var message = new Replicatable
            {
                //the file we want to copy, including required SAS signature
                Source = sourceUri.ToString(),

                //the place we want AzReplicate to put the file, including required SAS signature
                Destination = destUri.ToString(),

                //Anything you pass along in the Diag Info will be available on the completer
                DiagnosticInfo = null,

                //Here we are passing along any metadata from the source
                Metadata = sourceBlob.Metadata.ToDictionary(x => x.Key, x => x.Value)
            };

            //convert the message to Json and put it in the queue
            //timeToLive of -1 second means the message never expires
            var serializedMessage = JsonConvert.SerializeObject(message);
            await _queueClient.SendMessageAsync(serializedMessage, timeToLive: TimeSpan.FromSeconds(-1), cancellationToken: cancellationToken);

            messageCount++;

            //show some progress in the logs
            if ((messageCount % 100) == 0)
            {
                _logger.LogInformation($"Sample AzBlobSourcerer Worker {messageCount} items added to the queue at {DateTimeOffset.UtcNow}");
            }
        }

        //log that we are all done
        //NOTE(review): this sits inside the container loop, so it logs once per
        //container (with a running total), not once at the end of the run.
        _logger.LogInformation($"Sample AzBlobSourcerer Worker Done adding items to the queue. Total message count {messageCount} at {DateTimeOffset.UtcNow}");
    }
}
public void BlobUriBuilder_AccountNamePeriod()
{
    // The account name is the first dotted label of the standard endpoint host.
    var builder = new BlobUriBuilder(new Uri("https://account.z.blob.core.windows.net/share/dir"));

    Assert.AreEqual("account", builder.AccountName);
}
// Sample: parse an Event Hubs connection string into its namespace/hub parts,
// build a blob-container checkpoint store from a storage connection string,
// and run an EventProcessorClient for ~30 seconds. The /*@@*/ lines are
// snippet-generation machinery that substitutes live test values.
public async Task ConnectionStringParse()
{
    #region Snippet:EventHubs_Processor_Sample05_ConnectionStringParse
    TokenCredential credential = new DefaultAzureCredential();

    var storageConnectionString = "<< CONNECTION STRING FOR THE STORAGE ACCOUNT >>";
    var blobContainerName = "<< NAME OF THE BLOB CONTAINER >>";
    /*@@*/
    /*@@*/ storageConnectionString = StorageTestEnvironment.Instance.StorageConnectionString;
    /*@@*/ blobContainerName = _storageScope.ContainerName;

    var eventHubsConnectionString = "<< CONNECTION STRING FOR THE EVENT HUBS NAMESPACE >>";
    var eventHubName = "<< NAME OF THE EVENT HUB >>";
    var consumerGroup = "<< NAME OF THE EVENT HUB CONSUMER GROUP >>";
    /*@@*/
    /*@@*/ eventHubsConnectionString = EventHubsTestEnvironment.Instance.EventHubsConnectionString;
    /*@@*/ eventHubName = _eventHubScope.EventHubName;
    /*@@*/ consumerGroup = _eventHubScope.ConsumerGroups.First();
    /*@@*/ credential = EventHubsTestEnvironment.Instance.Credential;

    // Derive the container URI for the checkpoint store from the storage
    // connection string, then use token-credential auth against it.
    var storageEndpoint = new BlobServiceClient(storageConnectionString).Uri;
    var blobUriBuilder = new BlobUriBuilder(storageEndpoint);
    blobUriBuilder.BlobContainerName = blobContainerName;

    var storageClient = new BlobContainerClient(
        blobUriBuilder.ToUri(),
        credential);

    EventHubsConnectionStringProperties properties = EventHubsConnectionStringProperties.Parse(eventHubsConnectionString);

    var processor = new EventProcessorClient(
        storageClient,
        consumerGroup,
        properties.FullyQualifiedNamespace,
        properties.EventHubName ?? eventHubName,
        credential);

    try
    {
        using var cancellationSource = new CancellationTokenSource();
        cancellationSource.CancelAfter(TimeSpan.FromSeconds(30));

        // The event handlers are not relevant for this sample; for
        // illustration, they're delegating the implementation to the
        // host application.
        processor.ProcessEventAsync += Application.ProcessorEventHandler;
        processor.ProcessErrorAsync += Application.ProcessorErrorHandler;

        try
        {
            await processor.StartProcessingAsync(cancellationSource.Token);
            await Task.Delay(Timeout.Infinite, cancellationSource.Token);
        }
        catch (TaskCanceledException)
        {
            // This is expected if the cancellation token is
            // signaled.
        }
        finally
        {
            // This may take up to the length of time defined
            // as part of the configured TryTimeout of the processor;
            // by default, this is 60 seconds.
            await processor.StopProcessingAsync();
        }
    }
    catch
    {
        // If this block is invoked, then something external to the
        // processor was the source of the exception.
    }
    finally
    {
        // It is encouraged that you unregister your handlers when you have
        // finished using the Event Processor to ensure proper cleanup.
        processor.ProcessEventAsync -= Application.ProcessorEventHandler;
        processor.ProcessErrorAsync -= Application.ProcessorErrorHandler;
    }
    #endregion
}
/// <inheritdoc/> public async Task <string> CopyFilesIntoNewAsset(IEnumerable <Uri> filesToCopy) { _ = filesToCopy ?? throw new ArgumentNullException(nameof(filesToCopy)); _ = !filesToCopy.Any() ? throw new ArgumentOutOfRangeException(nameof(filesToCopy), "Count is zero") : 0; string newAssetId; var assetUriBuilder = new BlobUriBuilder(filesToCopy.First()); string assetName = GetInputAssetName(assetUriBuilder); string assetAccountName = assetUriBuilder.AccountName; Uri assetUri; try { (newAssetId, assetUri) = await _mediaServicesV2RestSharp.CreateEmptyAssetAsync(assetName, assetAccountName).ConfigureAwait(false); } catch (Exception e) { _log.LogError(e, $"Error creating asset for {assetUriBuilder.ToUri()}"); throw new Exception($"Failed to create asset for {assetUriBuilder.ToUri()}", e); } _log.LogInformation($"Created {newAssetId}, {assetName}"); try { foreach (var fileToCopy in filesToCopy) { var sourceUriBuilder = new BlobUriBuilder(fileToCopy); var destUriBuilder = new BlobUriBuilder(assetUri) { BlobName = sourceUriBuilder.BlobName, }; var exists = await _storageService.BlobExistsAsync(fileToCopy).ConfigureAwait(false); if (!exists) { _log.LogError($"Attempted to use nonexistent blob: {fileToCopy} as input to encoding."); } var s = new Stopwatch(); s.Start(); var copyFromUriOperation = await _storageService.BlobCopyAsync(fileToCopy, destUriBuilder.ToUri()).ConfigureAwait(false); var response = await copyFromUriOperation.WaitForCompletionAsync().ConfigureAwait(false); s.Stop(); _log.LogInformation($"MediaServicesV2CopyFileCompleted {s.ElapsedMilliseconds.ToString("G", CultureInfo.InvariantCulture)}"); } await _mediaServicesV2RestSharp.CreateFileInfosAsync(newAssetId).ConfigureAwait(false); _log.LogInformation($"MediaServicesV2CopyFileAndUpdateAssetSuccess {assetName}, {assetUri}"); } catch (Exception e) { _log.LogError(e, "Failed while coping files"); throw new Exception($"Failed to copy {assetName} to {newAssetId}", e); } return(newAssetId); }
/// <summary> /// Generates the input asset name from the source Uri. /// </summary> /// <param name="sourceUriBuilder">Source Uri.</param> /// <returns>The input asset name generated.</returns> public string GetInputAssetName(BlobUriBuilder sourceUriBuilder) { _ = sourceUriBuilder ?? throw new ArgumentNullException(nameof(sourceUriBuilder)); return($"V2-{sourceUriBuilder.AccountName}-{sourceUriBuilder.BlobContainerName}-Input"); }
public void BlobUriBuilder_AccountNameError() { var blobUriBuilder = new BlobUriBuilder(new Uri("http://notaurl")); Assert.IsEmpty(blobUriBuilder.AccountName); }
/// <summary>
/// Generates a time-limited, read-only download URI for a specific version of a blob by
/// signing a SAS token with a cached user delegation key obtained via the application's
/// managed identity.  The returned link forces a download file name of
/// <paramref name="publicFacingBlobName"/> when that name carries an extension.
/// </summary>
/// <param name="containerName">Container holding the blob; must be non-blank.</param>
/// <param name="blobName">Name of the blob; must be non-blank.</param>
/// <param name="blobVersion">Blob version identifier to pin the link to; must be non-blank.</param>
/// <param name="publicFacingBlobName">File name presented to the downloader; must be non-blank.</param>
/// <param name="cancellationToken">Token used to cancel the delegation-key request.</param>
/// <returns>The blob URI with the SAS query parameters appended.</returns>
async Task <Uri> IAzureBlobStoreClient.GenerateEphemeralDownloadLink(string containerName, string blobName, string blobVersion, string publicFacingBlobName, CancellationToken cancellationToken)
{
    // We will secure the link by creating a user delegate sas token signed by the managed identity of this application, thus
    // only the intersection of allowed permissions are applicable. In this case, we only want to assign the read
    // permission to the token, and for such access to be limited to a set period of time after which the token will expire
    //
    // If running local, note that the azure credentials resolved are those you are logged in as (for example the Visual Studio
    // azure account)

    // NOTE(review): these blank/whitespace checks throw ArgumentNullException even for
    // non-null whitespace input; ArgumentException would be the conventional type, but
    // callers may already catch ArgumentNullException - confirm before changing.
    if (string.IsNullOrWhiteSpace(containerName))
    {
        throw new ArgumentNullException(nameof(containerName));
    }

    if (string.IsNullOrWhiteSpace(blobName))
    {
        throw new ArgumentNullException(nameof(blobName));
    }

    if (string.IsNullOrWhiteSpace(blobVersion))
    {
        throw new ArgumentNullException(nameof(blobVersion));
    }

    if (string.IsNullOrWhiteSpace(publicFacingBlobName))
    {
        throw new ArgumentNullException(nameof(publicFacingBlobName));
    }

    cancellationToken.ThrowIfCancellationRequested();

    // Resolves the managed identity in Azure, or the developer's signed-in account locally.
    var managedIdentityCredential = new DefaultAzureCredential();

    var blobClientOptions = GetBlobClientOptions(_geoRedundantServiceUrl);
    var blobServiceClient = new BlobServiceClient(_primaryServiceUrl, managedIdentityCredential, blobClientOptions);
    var blobContainerClient = blobServiceClient.GetBlobContainerClient(containerName);

    // Pin the client to the requested version so the SAS cannot read newer versions.
    var blobClient = blobContainerClient.GetBlobClient(blobName).WithVersion(blobVersion);

    var tokenStartsOn = _systemClock.UtcNow;
    var tokenExpiresOn = tokenStartsOn.AddMinutes(TOKEN_SAS_TIMEOUT_IN_MINUTES);

    // Only set a Content-Disposition override when the public name has an extension.
    var fileInfo = new FileInfo(publicFacingBlobName);
    var setContentDisposition = !string.IsNullOrWhiteSpace(fileInfo.Extension);

    // NOTE(review): the delegation key is cached for one day, but it is requested with a
    // validity window ending at tokenExpiresOn (minutes from now). Cache hits after that
    // window would sign SAS tokens with an expired key - verify the intended key lifetime.
    var userDelegationKey = await _memoryCache.GetOrCreateAsync(
        $"{nameof(AzureBlobStoreClient)}:UserDelegationKey",
        async cacheEntry =>
        {
            cacheEntry.Priority = CacheItemPriority.High;
            cacheEntry.AbsoluteExpirationRelativeToNow = TimeSpan.FromDays(1);

            try
            {
                var azureResponse = await blobServiceClient.GetUserDelegationKeyAsync(tokenStartsOn, tokenExpiresOn, cancellationToken);
                return(azureResponse.Value);
            }
            catch (RequestFailedException ex)
            {
                _logger?.LogError(ex, "Unable to access the storage endpoint to generate a user delegation key: '{StatusCode} {StatusCodeName}'", ex.Status, Enum.Parse(typeof(HttpStatusCode), Convert.ToString(ex.Status, CultureInfo.InvariantCulture)));
                throw;
            }
        });

    // Build a read-only ("b" = blob resource) SAS scoped to this exact blob version.
    var readOnlyPermission = BlobSasPermissions.Read;
    var blobSasBuilder = new BlobSasBuilder(readOnlyPermission, tokenExpiresOn)
    {
        BlobContainerName = blobContainerClient.Name,
        BlobName = blobClient.Name,
        BlobVersionId = blobVersion,
        Resource = "b",
        StartsOn = tokenStartsOn,
        ExpiresOn = tokenExpiresOn,
        Protocol = SasProtocol.Https,
        // RFC 5987 encoding so non-ASCII file names survive the header round-trip.
        ContentDisposition = setContentDisposition ? $"attachment; filename*=UTF-8''{Uri.EscapeDataString(publicFacingBlobName)}" : default
        //PreauthorizedAgentObjectId = set this if we use AAD to authenticate our users,
    };

    // Sign with the delegation key and append the SAS to the versioned blob URI.
    var blobUriBuilder = new BlobUriBuilder(blobClient.Uri)
    {
        Sas = blobSasBuilder.ToSasQueryParameters(userDelegationKey, blobServiceClient.AccountName)
    };

    var uri = blobUriBuilder.ToUri();
    return(uri);
}
/// <summary>
/// Live sample demonstrating how to construct an <c>EventProcessorClient</c> using
/// <c>DefaultAzureCredential</c> for both the Blob Storage checkpoint container and the
/// Event Hubs namespace.  The <c>#region</c> below is extracted verbatim into published
/// documentation; the <c>/*@@*/</c> lines are sample-tooling markers that swap in live
/// test-environment values and are hidden from readers — do not remove or reorder them.
/// </summary>
public async Task DefaultAzureCredential()
{
    await using var eventHubScope = await EventHubScope.CreateAsync(1);
    await using var storageScope = await StorageScope.CreateAsync();

    #region Snippet:EventHubs_Processor_Sample05_DefaultAzureCredential

    TokenCredential credential = new DefaultAzureCredential();

    var storageEndpoint = "<< STORAGE ENDPOINT (likely similar to {your-account}.blob.core.windows.net) >>";
    var blobContainerName = "<< NAME OF THE BLOB CONTAINER >>";
    /*@@*/
    /*@@*/ storageEndpoint = new BlobServiceClient(StorageTestEnvironment.Instance.StorageConnectionString).Uri.ToString();
    /*@@*/ blobContainerName = storageScope.ContainerName;

    var fullyQualifiedNamespace = "<< NAMESPACE (likely similar to {your-namespace}.servicebus.windows.net) >>";
    var eventHubName = "<< NAME OF THE EVENT HUB >>";
    var consumerGroup = "<< NAME OF THE EVENT HUB CONSUMER GROUP >>";
    /*@@*/
    /*@@*/ fullyQualifiedNamespace = EventHubsTestEnvironment.Instance.FullyQualifiedNamespace;
    /*@@*/ eventHubName = eventHubScope.EventHubName;
    /*@@*/ consumerGroup = eventHubScope.ConsumerGroups.First();
    /*@@*/ credential = EventHubsTestEnvironment.Instance.Credential;

    var blobUriBuilder = new BlobUriBuilder(new Uri(storageEndpoint));
    blobUriBuilder.BlobContainerName = blobContainerName;

    var storageClient = new BlobContainerClient(
        blobUriBuilder.ToUri(),
        credential);

    var processor = new EventProcessorClient(
        storageClient,
        consumerGroup,
        fullyQualifiedNamespace,
        eventHubName,
        credential);

    try
    {
        using var cancellationSource = new CancellationTokenSource();
        cancellationSource.CancelAfter(TimeSpan.FromSeconds(30));

        // The event handlers are not relevant for this sample; for
        // illustration, they're delegating the implementation to the
        // host application.
        processor.ProcessEventAsync += Application.ProcessorEventHandler;
        processor.ProcessErrorAsync += Application.ProcessorErrorHandler;

        try
        {
            await processor.StartProcessingAsync(cancellationSource.Token);
            await Task.Delay(Timeout.Infinite, cancellationSource.Token);
        }
        catch (TaskCanceledException)
        {
            // This is expected if the cancellation token is
            // signaled.
        }
        finally
        {
            // This may take up to the length of time defined
            // as part of the configured TryTimeout of the processor;
            // by default, this is 60 seconds.
            await processor.StopProcessingAsync();
        }
    }
    catch
    {
        // If this block is invoked, then something external to the
        // processor was the source of the exception.
    }
    finally
    {
        // It is encouraged that you unregister your handlers when you have
        // finished using the Event Processor to ensure proper cleanup.
        processor.ProcessEventAsync -= Application.ProcessorEventHandler;
        processor.ProcessErrorAsync -= Application.ProcessorErrorHandler;
    }

    #endregion
}
/// <summary> /// Initializes a new instance of the <see cref="AppendBlobClient"/> /// class with an identical <see cref="Uri"/> source but the specified /// <paramref name="snapshot"/> timestamp. /// /// For more information, see <see href="https://docs.microsoft.com/en-us/rest/api/storageservices/creating-a-snapshot-of-a-blob" />. /// </summary> /// <param name="snapshot">The snapshot identifier.</param> /// <returns>A new <see cref="AppendBlobClient"/> instance.</returns> /// <remarks> /// Pass null or empty string to remove the snapshot returning a URL /// to the base blob. /// </remarks> public new AppendBlobClient WithSnapshot(string snapshot) { var builder = new BlobUriBuilder(Uri) { Snapshot = snapshot }; return new AppendBlobClient(builder.ToUri(), Pipeline, Version, ClientDiagnostics, CustomerProvidedKey, EncryptionScope); }