/// <summary>
/// EventGrid-triggered import of a FHIR bundle blob. Optionally transforms the bundle
/// (controlled by FBI-TRANSFORMBUNDLES), POSTs it to the FHIR server, then routes the
/// source blob: success -> bundlesprocessed, errors -> bundleserr (dead letter), and
/// throttled entries -> a new retry bundle dropped back into "bundles".
/// </summary>
/// <param name="blobCreatedEvent">BlobCreated event; data.url identifies the bundle blob.</param>
/// <param name="myBlob">Read-only stream bound to the blob named in the event.</param>
/// <param name="log">Function logger.</param>
/// <exception cref="Exception">
/// Thrown deliberately when the FHIR server returns 429 (TooManyRequests) so the
/// EventGrid retry/dead-letter machinery re-delivers the event.
/// </exception>
public static async Task Run([EventGridTrigger] EventGridEvent blobCreatedEvent, [Blob("{data.url}", FileAccess.Read, Connection = "FBI-STORAGEACCT")] Stream myBlob, ILogger log)
{
    // Transform is on by default; FBI-TRANSFORMBUNDLES = "no"/"false" disables it.
    bool trbundles = true;
    string strbundles = System.Environment.GetEnvironmentVariable("FBI-TRANSFORMBUNDLES");
    if (!string.IsNullOrEmpty(strbundles) &&
        (strbundles.Equals("no", StringComparison.OrdinalIgnoreCase) ||
         strbundles.Equals("false", StringComparison.OrdinalIgnoreCase)))
    {
        trbundles = false;
    }
    StorageBlobCreatedEventData createdEvent = ((JObject)blobCreatedEvent.Data).ToObject<StorageBlobCreatedEventData>();
    // Blob name is the last path segment of the event URL.
    string name = createdEvent.Url.Substring(createdEvent.Url.LastIndexOf('/') + 1);
    if (myBlob == null)
    {
        return;
    }
    log.LogInformation($"ImportFHIRBUndles: Processing file Name:{name} \n Size: {myBlob.Length}");
    var cbclient = StorageUtils.GetCloudBlobClient(System.Environment.GetEnvironmentVariable("FBI-STORAGEACCT"));
    string text;
    // Dispose the reader (the original leaked it); this also disposes myBlob, which is
    // safe because the stream is not touched again after this point.
    using (StreamReader reader = new StreamReader(myBlob))
    {
        text = await reader.ReadToEndAsync();
    }
    var trtext = (trbundles ? FHIRUtils.TransformBundle(text, log) : text);
    var fhirbundle = await FHIRUtils.CallFHIRServer("", trtext, HttpMethod.Post, log);
    var result = LoadErrorsDetected(trtext, fhirbundle, name, log);
    //Bundle Post was Throttled we can retry
    if (!fhirbundle.Success && fhirbundle.Status == System.Net.HttpStatusCode.TooManyRequests)
    {
        //Currently cannot use retry hints with EventGrid Trigger function bindings so we will throw and exception to enter eventgrid retry logic for FHIR Server throttling and do
        //our own dead letter for internal errors or unrecoverable conditions
        log.LogInformation($"ImportFHIRBUndles File Name:{name} is throttled...");
        throw new Exception($"ImportFHIRBUndles: Transient Error File: {name}...Entering eventgrid retry process until success or ultimate failure to dead letter if configured.");
    }
    //No Errors move to processed container
    if (fhirbundle.Success && ((JArray)result["errors"]).Count == 0 && ((JArray)result["throttled"]).Count == 0)
    {
        await StorageUtils.MoveTo(cbclient, "bundles", "bundlesprocessed", name, $"{name}.processed", log);
        await StorageUtils.WriteStringToBlob(cbclient, "bundlesprocessed", $"{name}.processed.result", fhirbundle.Content, log);
        log.LogInformation($"ImportFHIRBUndles Processed file Name:{name}");
    }
    //Handle Errors from FHIR Server of proxy
    if (!fhirbundle.Success || ((JArray)result["errors"]).Count > 0)
    {
        await StorageUtils.MoveTo(cbclient, "bundles", "bundleserr", name, $"{name}.err", log);
        await StorageUtils.WriteStringToBlob(cbclient, "bundleserr", $"{name}.err.response", fhirbundle.Content, log);
        await StorageUtils.WriteStringToBlob(cbclient, "bundleserr", $"{name}.err.actionneeded", result.ToString(), log);
        log.LogInformation($"ImportFHIRBUndles File Name:{name} had errors. Moved to deadletter bundleserr directory");
    }
    //Handle Throttled Requests inside of bundle so we will create a new bundle to retry
    if (fhirbundle.Success && ((JArray)result["throttled"]).Count > 0)
    {
        var nb = NDJSONConverter.initBundle();
        nb["entry"] = result["throttled"];
        // New random name so the retry bundle re-enters the pipeline without colliding.
        string fn = $"retry{Guid.NewGuid().ToString().Replace("-", "")}.json";
        await StorageUtils.MoveTo(cbclient, "bundles", "bundlesprocessed", name, $"{name}.processed", log);
        await StorageUtils.WriteStringToBlob(cbclient, "bundlesprocessed", $"{name}.processed.result", fhirbundle.Content, log);
        await StorageUtils.WriteStringToBlob(cbclient, "bundles", fn, nb.ToString(), log);
        log.LogInformation($"ImportFHIRBUndles File Name:{name} had throttled resources in response bundle. Moved to processed..Created retry bunde {fn}");
    }
}
/// <summary>
/// Blob-triggered decompression of zip archives dropped in the "zip" container.
/// Extracts *.ndjson entries to the "ndjson" container and *.json entries to the
/// "bundles" container, then moves the processed archive to "zipprocessed".
/// Entries with any other extension are logged and skipped.
/// </summary>
/// <param name="myBlob">Stream over the uploaded zip blob.</param>
/// <param name="name">Blob name (only processed when it ends in .zip).</param>
/// <param name="log">Function logger.</param>
// FIX: was 'async void' — the Functions host could not await completion and any
// unhandled exception would crash the process. Returning Task is what the host expects.
public static async Task Run([BlobTrigger("zip/{name}", Connection = "FBI-STORAGEACCT")] Stream myBlob, string name, ILogger log)
{
    try
    {
        int filecnt = 0;
        if (name.Split('.').Last().ToLower() == "zip")
        {
            CloudStorageAccount storageAccount = CloudStorageAccount.Parse(Utils.GetEnvironmentVariable("FBI-STORAGEACCT"));
            CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
            CloudBlobContainer containerndjson = blobClient.GetContainerReference("ndjson");
            CloudBlobContainer containerbundles = blobClient.GetContainerReference("bundles");
            using (MemoryStream blobMemStream = new MemoryStream())
            {
                log.LogInformation($"ImportCompressedFiles: Decompressing {name} ...");
                await myBlob.CopyToAsync(blobMemStream);
                using (ZipArchive archive = new ZipArchive(blobMemStream))
                {
                    foreach (ZipArchiveEntry entry in archive.Entries)
                    {
                        //Replace all NO digits, letters, or "-" by a "-" Azure storage is specific on valid characters
                        string validname = Regex.Replace(entry.Name, @"[^a-zA-Z0-9\-]", "-").ToLower();
                        log.LogInformation($"ImportCompressedFiles: Now processing {entry.FullName} size {FormatSize(entry.Length)}");
                        CloudBlobContainer destination = null;
                        if (validname.ToLower().EndsWith("ndjson"))
                        {
                            destination = containerndjson;
                            validname += ".ndjson";
                        }
                        else if (validname.ToLower().EndsWith("json"))
                        {
                            destination = containerbundles;
                            validname += ".json";
                        }
                        if (destination != null)
                        {
                            CloudBlockBlob blockBlob = destination.GetBlockBlobReference(validname);
                            using (var fileStream = entry.Open())
                            {
                                await blockBlob.UploadFromStreamAsync(fileStream);
                            }
                            log.LogInformation($"ImportCompressedFiles: Extracted {entry.FullName} to {destination.Name}/{validname}");
                            // FIX: count only entries actually extracted; the original also
                            // counted skipped entries, making the summary log inaccurate.
                            filecnt++;
                        }
                        else
                        {
                            log.LogInformation($"ImportCompressedFiles: Entry {entry.FullName} skipped does not end in .ndjson or .json");
                        }
                    }
                }
            }
            log.LogInformation($"ImportCompressedFiles: Completed Decompressing {name} extracted {filecnt} files...");
            await StorageUtils.MoveTo(blobClient, "zip", "zipprocessed", name, name, log);
        }
    }
    catch (Exception ex)
    {
        // FIX: failures were logged at Information level with only the message,
        // effectively swallowing the error. Log at Error with the full exception.
        log.LogError(ex, $"ImportCompressedFiles: Error! Something went wrong: {ex.Message}");
    }
}
/// <summary>
/// EventGrid-triggered conversion of an NDJSON blob into one or more FHIR batch
/// bundles. Reads the blob line-by-line, parses each line as a JSON resource and
/// packs up to FBI-MAXRESOURCESPERBUNDLE (default 200) resources per bundle, writing
/// each bundle to the "bundles" container. The source blob is then moved to
/// "ndjsonprocessed"; unparseable lines are collected into "ndjsonerr/{name}.err".
/// </summary>
/// <param name="blobCreatedEvent">BlobCreated event; data.url identifies the NDJSON blob.</param>
/// <param name="myBlob">Read-only stream bound to the blob named in the event.</param>
/// <param name="log">Function logger.</param>
public static async Task Run([EventGridTrigger] EventGridEvent blobCreatedEvent, [Blob("{data.url}", FileAccess.Read, Connection = "FBI-STORAGEACCT")] Stream myBlob, ILogger log)
{
    // Bundle size cap; override via FBI-MAXRESOURCESPERBUNDLE, falling back to 200
    // when the variable is missing or not a valid integer.
    int maxresourcesperbundle = 200;
    var cbclient = StorageUtils.GetCloudBlobClient(System.Environment.GetEnvironmentVariable("FBI-STORAGEACCT"));
    string mrbundlemax = System.Environment.GetEnvironmentVariable("FBI-MAXRESOURCESPERBUNDLE");
    if (!string.IsNullOrEmpty(mrbundlemax))
    {
        if (!int.TryParse(mrbundlemax, out maxresourcesperbundle))
        {
            maxresourcesperbundle = 200;
        }
    }
    StorageBlobCreatedEventData createdEvent = ((JObject)blobCreatedEvent.Data).ToObject<StorageBlobCreatedEventData>();
    log.LogInformation($"NDJSONConverter: Processing blob at {createdEvent.Url}...");
    // Blob name is the last path segment of the event URL.
    string name = createdEvent.Url.Substring(createdEvent.Url.LastIndexOf('/') + 1);
    JObject rv = initBundle();
    int linecnt = 0;
    int total = 0;
    int bundlecnt = 0;
    int errcnt = 0;
    int fileno = 1;
    StringBuilder errsb = new StringBuilder();
    using (StreamReader reader = new StreamReader(myBlob))
    {
        string line;
        // FIX: was synchronous reader.ReadLine() inside an async function, which
        // blocks a thread-pool thread on I/O; ReadLineAsync keeps it non-blocking.
        while ((line = await reader.ReadLineAsync()) != null)
        {
            linecnt++;
            JObject res = null;
            try
            {
                res = JObject.Parse(line);
                addResource(rv, res);
                bundlecnt++;
                total++;
            }
            catch (Exception e)
            {
                // Bad line: record it for the .err blob and keep going.
                log.LogError($"NDJSONConverter: File {name} is in error or contains invalid JSON at line number {linecnt}:{e.Message}");
                errsb.Append($"{line}\n");
                errcnt++;
            }
            if (bundlecnt >= maxresourcesperbundle)
            {
                // Flush the full bundle and start a fresh one.
                await StorageUtils.WriteStringToBlob(cbclient, "bundles", $"{name}-{fileno++}.json", rv.ToString(), log);
                bundlecnt = 0;
                rv = initBundle();
            }
        }
        // Flush any partially filled final bundle.
        if (bundlecnt > 0)
        {
            await StorageUtils.WriteStringToBlob(cbclient, "bundles", $"{name}-{fileno++}.json", rv.ToString(), log);
        }
        await StorageUtils.MoveTo(cbclient, "ndjson", "ndjsonprocessed", name, $"{name}.processed", log);
        if (errcnt > 0)
        {
            await StorageUtils.WriteStringToBlob(cbclient, "ndjsonerr", $"{name}.err", errsb.ToString(), log);
        }
        log.LogInformation($"NDJSONConverter: Processing file {name} completed with {total} resources created in {fileno - 1} bundles with {errcnt} errors...");
    }
}