/// <summary>
/// HTTP-triggered function: validates a customer's id/secret against Cosmos DB and,
/// when authorized, returns a storage account name, container name, and a SAS token
/// the customer can use to upload (list/write only — never download) their data.
/// Returns 200 with a <see cref="ReturnData"/> payload, 401 when the credentials do
/// not match an enabled customer, and 400 on missing input or any unexpected error.
/// </summary>
public static async Task<IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequest req,
    ILogger log)
{
    try
    {
        log.LogInformation("GetAzureStorageSASUploadToken begin request.");

        string cloudAccountName;
        string cloudKey;

        if (req.Host.ToString().IndexOf("localhost", StringComparison.OrdinalIgnoreCase) >= 0)
        {
            // Local development: the storage emulator's well-known, publicly documented
            // dev account and key (not a secret).
            cloudAccountName = "devstoreaccount1";
            cloudKey = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==";
        }
        else
        {
            // Uses MSI to get an Azure AD token. You can run locally if you have a
            // domain-joined computer and your domain is synced with Azure AD.
            // The Function App must be in a Key Vault policy that can read secrets.
            AzureServiceTokenProvider azureServiceTokenProvider = new AzureServiceTokenProvider();
            KeyVaultClient keyvaultClient = new KeyVaultClient(
                new KeyVaultClient.AuthenticationCallback(azureServiceTokenProvider.KeyVaultTokenCallback));

            string keyVault = Environment.GetEnvironmentVariable("MY_KEY_VAULT");
            var secretCloudAccountName = await keyvaultClient.GetSecretAsync($"https://{keyVault}.vault.azure.net/", "LandingZoneStorageAccountName");
            var secretCloudKey = await keyvaultClient.GetSecretAsync($"https://{keyVault}.vault.azure.net/", "LandingZoneStorageAccountKey");
            cloudAccountName = secretCloudAccountName.Value;
            cloudKey = secretCloudKey.Value;
        }

        // Credentials may arrive on the query string or in the JSON body (query wins).
        string customerId = req.Query["customerId"];
        string customerSecret = req.Query["customerSecret"];

        string requestBody;
        using (StreamReader reader = new StreamReader(req.Body)) // FIX: dispose the reader
        {
            requestBody = await reader.ReadToEndAsync();
        }
        dynamic data = JsonConvert.DeserializeObject(requestBody);
        customerId = customerId ?? data?.customerId;
        customerSecret = customerSecret ?? data?.customerSecret;

        // FIX: the original called customerId.ToLower() unconditionally and threw a
        // NullReferenceException (surfacing as a 400 only via the catch-all) when the
        // id was supplied neither on the query string nor in the body. Fail explicitly.
        if (string.IsNullOrEmpty(customerId))
        {
            return new BadRequestResult();
        }
        customerId = customerId.ToLower();

        DocumentDBRepository<CosmosIngestionData> documentDBRepository = new DocumentDBRepository<CosmosIngestionData>(log);
        // NOTE(review): the secret is matched inside the query, which is not a
        // constant-time comparison — acceptable for sample code, verify for production.
        var result = documentDBRepository
            .GetItems(o => o.CustomerId == customerId &&
                           o.PartitionId == customerId &&
                           o.CustomerSecret == customerSecret &&
                           o.isCustomerEnabled)
            .FirstOrDefault();

        // INSERT SEED DATA: bootstrap the demo customer the first time "acmeinc" calls in.
        if (result == null && customerId == "acmeinc")
        {
            CosmosIngestionData acmeInc = new CosmosIngestionData
            {
                CustomerId = "acmeinc",
                PartitionId = "acmeinc",
                ContainerName = "acmeinc",
                CustomerSecret = "0DC8B9026ECD402C84C66AFB5B87E28C",
                CustomerSASTokenExpireTimeInMinutes = 60,
                CustomerWhitelistIPAddress = null,
                isCustomerEnabled = true,
                ADFPipelineName = "CopyLandingDataToDataLake",
                ADFResourceGroup = Environment.GetEnvironmentVariable("ResourceGroup"),
                ADFDataFactoryName = Environment.GetEnvironmentVariable("DataFactoryName"),
                ADFSubscriptionId = Environment.GetEnvironmentVariable("SubscriptionId"),
                isADFEnabled = true
            };

            // FIX: await instead of .Wait() — blocking inside an async method risks
            // thread-pool starvation and wraps failures in AggregateException.
            await documentDBRepository.Client.UpsertDocumentAsync(documentDBRepository.Collection.SelfLink, acmeInc);
            result = acmeInc;
        }

        if (result == null)
        {
            return new UnauthorizedResult();
        }

        // Create a SAS token with list and write privileges (no read or delete) -
        // the customer can upload, but never download, to protect their data.
        string sasToken = GetSASToken(result.ContainerName, result.CustomerWhitelistIPAddress,
            result.CustomerSASTokenExpireTimeInMinutes, cloudAccountName, cloudKey);

        ReturnData returnData = new ReturnData()
        {
            // The emulator needs its full local endpoint; a real account just needs the name.
            AccountName = cloudAccountName == "devstoreaccount1"
                ? "http://127.0.0.1:10000/devstoreaccount1"
                : cloudAccountName,
            ContainerName = result.ContainerName,
            SASToken = sasToken
        };
        return (ActionResult)new OkObjectResult(returnData);
    }
    catch (Exception ex)
    {
        // FIX: pass the exception object so the logger captures it as structured data
        // instead of a flattened string.
        log.LogError(ex, "GetAzureStorageSASUploadToken Exception");
        return new BadRequestResult();
    }
} // Run
} // Run

/// <summary>
/// Processes one "blob created" queue message: parses the customer id and blob
/// directory out of the event subject, looks up the customer's ingestion config in
/// Cosmos DB, and starts the matching ADF pipeline. On success the message is
/// deleted; when the config is missing the message is requeued for 1 hour; when the
/// pipeline is disabled or fails to start it is requeued for 5 minutes; messages for
/// any other event type are deleted.
/// </summary>
static void ProcessItem(Microsoft.WindowsAzure.Storage.Queue.CloudQueueMessage queueItem, string queueName, Microsoft.WindowsAzure.Storage.Queue.CloudQueue cloudQueue, ILogger log)
{
    // FIX: the original concatenated the CloudQueueMessage object itself, which logs
    // only the type name; log the message id so the entry identifies the item.
    log.LogInformation("ProcessItem:" + queueItem.Id);

    JSONService jsonService = new JSONService();
    FileEvent fileEvent = jsonService.Deserialize<FileEvent>(queueItem.AsString);
    log.LogInformation("ProcessItem Topic: " + fileEvent.topic);
    log.LogInformation("ProcessItem Subject: " + fileEvent.subject);

    // Make sure we have the correct event.
    // (An earlier filter on subjects ending in "end_file.txt" was disabled on purpose:
    //  "&& fileEvent.subject.ToLower().EndsWith(\"end_file.txt\")".)
    if (fileEvent.eventType == "Microsoft.Storage.BlobCreated")
    {
        // e.g. subject: "/blobServices/default/containers/acmeinc/blobs/inbox/2020-04-13/test_end_file.txt"
        // or, for a root-level blob: "/blobServices/default/containers/acmeinc/blobs/test_end_file.txt"
        string removedPrefix = fileEvent.subject.ToLower().Replace("/blobservices/default/containers/", string.Empty);
        string customerId = removedPrefix.Substring(0, removedPrefix.IndexOf('/'));
        string partitionId = customerId; // this just happens to be the same as the customer id in this example

        // Get the Cosmos DB data as to which pipeline belongs to this customer.
        DocumentDBRepository<CosmosIngestionData> documentDBRepository = new DocumentDBRepository<CosmosIngestionData>(log);
        var result = documentDBRepository.GetItems(o => o.CustomerId == customerId && o.PartitionId == partitionId).FirstOrDefault();

        if (result == null)
        {
            // Someone forgot to create the Cosmos DB document (whoops) — give the
            // developer an hour to get things set up before the next retry.
            log.LogInformation("Pipeline does not exist. Setting queue item to retry in 1 hour.");
            QueueService.IncreaseQueueItemLock(queueName, queueItem, TimeSpan.FromHours(1), cloudQueue);
        }
        else if (result.isADFEnabled)
        {
            // We are good to try to start the ADF pipeline.
            log.LogInformation("Starting Pipeline: " + result.ADFPipelineName);
            try
            {
                DateTime dtNow = DateTime.UtcNow; // you could make this EST or such....

                string inputFileSystem = customerId;
                int blobsIndex = removedPrefix.IndexOf("/blobs/", StringComparison.Ordinal) + 7;
                int lastSlashIndex = removedPrefix.LastIndexOf('/');
                // FIX: for a root-level blob, lastSlashIndex points at the slash inside
                // "/blobs/" and sits one position BEFORE blobsIndex, so the original
                // Substring received a negative length and threw — which sent the message
                // into an endless 5-minute retry loop via the catch below. The code's own
                // worked example ("13 -> 13 = 0") shows an empty directory was intended.
                string inputFileDirectory = lastSlashIndex > blobsIndex
                    ? removedPrefix.Substring(blobsIndex, lastSlashIndex - blobsIndex)
                    : string.Empty;

                string outputFileSystem = Environment.GetEnvironmentVariable("LandingZoneDataLakeContainer");
                string outputFileDirectory = string.Format("customer-landing-data/{0}/{1:yyyy}/{2:MM}/{3:dd}", customerId, dtNow, dtNow, dtNow);

                // Try to start the pipeline (we are "trying" since technically
                // something could have just disabled it :) ).
                DataFactoryService.StartPipeline(inputFileSystem, inputFileDirectory, outputFileSystem, outputFileDirectory,
                    result.ADFPipelineName, result.ADFResourceGroup, result.ADFDataFactoryName, result.ADFSubscriptionId, log);
                QueueService.DeleteQueueItem(queueName, queueItem, cloudQueue);
            }
            catch (Exception ex)
            {
                // Pipeline could not start (it might be in a "provisioning" state);
                // leave the item on the queue and try again in 5 minutes.
                // FIX: log at Error with the exception attached, not Information + ToString().
                log.LogError(ex, "Error starting Pipeline (item not dequeued): " + result.ADFPipelineName);
                QueueService.IncreaseQueueItemLock(queueName, queueItem, TimeSpan.FromMinutes(5), cloudQueue);
            }
        }
        else
        {
            // Pipeline is not enabled; set the queue item to try again in 5 minutes.
            log.LogInformation("Pipeline: " + result.ADFPipelineName + " is not enabled. Setting queue item to retry in 5 minutes.");
            QueueService.IncreaseQueueItemLock(queueName, queueItem, TimeSpan.FromMinutes(5), cloudQueue);
        }
    }
    else
    {
        // Not a valid event item - delete from queue and log (tell someone to fix the blob event filter).
        QueueService.DeleteQueueItem(queueName, queueItem, cloudQueue);
    }

    System.Threading.Thread.Sleep(500); // Just so the UI does not scroll too fast
} // Process Item