// Azure Functions v1-style sample (TraceWriter logging): copies each blob that
// lands in the "stuff" container into Azure Data Lake Store as /mydata/<name>.
//
// NOTE(review): the DataLakeStore attribute values here are literal placeholder
// strings (@"fqdn", @"applicationid", ...) rather than %appSetting% references
// like the other samples in this file — presumably meant to be replaced with
// real values or app-setting lookups; confirm intent.
// NOTE(review): "tentantid" looks like a typo for "tenantid" — verify against
// the binding's expected configuration key before changing.
// NOTE(review): this sample uses lowercase members (d.filename / d.stream)
// while the later samples use FileName / FileStream — presumably an older
// version of the DataLakeStoreOutput type; confirm against its declaration.
public static void Run(
    [BlobTrigger("stuff/{name}", Connection = "blobconn")]Stream myBlob,
    string name,
    [DataLakeStore(AccountFQDN = @"fqdn", ApplicationId = @"applicationid", ClientSecret = @"clientsecret", TenantID = @"tentantid")]out DataLakeStoreOutput dataLakeStoreOutput,
    TraceWriter log)
{
    log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    // Build the output-binding value: write the incoming blob stream to
    // /mydata/<name> in the Data Lake Store.
    var d = new DataLakeStoreOutput();
    d.filename = "/mydata/" + name;
    d.stream = myBlob;
    dataLakeStoreOutput = d;
}
// Blob-triggered function: copies each blob that lands in the "stuff"
// container into Azure Data Lake Store as /mydata/<name>.
// Data Lake credentials are resolved from app settings via %...% placeholders.
// NOTE(review): "%tentantid%" looks like a typo for "%tenantid%" — it must
// match the actual app-setting name, so verify before changing.
public static void Run(
    [BlobTrigger("stuff/{name}", Connection = "blobconn")] Stream myBlob,
    string name,
    [DataLakeStore(AccountFQDN = "%fqdn%", ApplicationId = "%applicationid%", ClientSecret = "%clientsecret%", TenantID = "%tentantid%")] out DataLakeStoreOutput dataLakeStoreOutput,
    ILogger log)
{
    log.LogInformation($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    // Hand the incoming blob stream straight to the output binding,
    // preserving the blob's name under the /mydata/ folder.
    var output = new DataLakeStoreOutput();
    output.FileName = "/mydata/" + name;
    output.FileStream = myBlob;
    dataLakeStoreOutput = output;
}
// Event Hub-triggered function: persists each HL7 message from the "hl7msgseh"
// hub into Azure Data Lake Store as hl7data/yyyy/MM/dd/<guid>.txt.
// Data Lake credentials are resolved from app settings via %...% placeholders.
// NOTE(review): "%tentantid%" looks like a typo for "%tenantid%" — it must
// match the actual app-setting name, so verify before changing.
public static void Run(
    [EventHubTrigger("hl7msgseh", Connection = "hl7eventhub", ConsumerGroup = "hl7readerdatalake")] EventData eventData,
    [DataLakeStore(AccountFQDN = "%fqdn%", ApplicationId = "%applicationid%", ClientSecret = "%clientsecret%", TenantID = "%tentantid%")] out DataLakeStoreOutput dataLakeStoreOutput,
    ILogger log)
{
    // eventData.Body is an ArraySegment<byte>: the message may occupy only a
    // slice of the backing array, so Offset/Count must always be honored.
    string messageBody = Encoding.UTF8.GetString(eventData.Body.Array, eventData.Body.Offset, eventData.Body.Count);

    dataLakeStoreOutput = new DataLakeStoreOutput()
    {
        // Date-partitioned path with a random, collision-free file name.
        FileName = "hl7data/" + DateTime.UtcNow.ToString("yyyy/MM/dd") + "/" + Guid.NewGuid().ToString("N") + ".txt",
        // BUG FIX: previously wrapped the whole backing array
        // (new MemoryStream(eventData.Body.Array)), which could write bytes
        // outside this message's segment. Restrict the stream to the segment,
        // matching the GetString call above.
        FileStream = new MemoryStream(eventData.Body.Array, eventData.Body.Offset, eventData.Body.Count)
    };

    // Replace these two lines with your processing logic.
    log.LogInformation($"C# Event Hub trigger function processed a message: {messageBody}");
}
// Test helper: wraps an arbitrary stream in a DataLakeStoreOutput targeting a
// randomly-named .txt file under /mydata/.
// NOTE(review): the bound 'item' parameter is never read — it only exercises
// the DataLakeStore binding itself.
public static DataLakeStoreOutput TestOutput(
    Stream myStream,
    [DataLakeStore(AccountFQDN = _AccountFQDN, ApplicationId = _ApplicationId, ClientSecret = _ClientSecret, TenantID = _TenantID)] DataLakeStoreOutput item)
{
    // Random GUID file name avoids collisions between test runs.
    var targetPath = "/mydata/" + Guid.NewGuid().ToString() + ".txt";

    return new DataLakeStoreOutput()
    {
        FileName = targetPath,
        FileStream = myStream
    };
}
// Blob-triggered function: copies Web Application Firewall log blobs into
// Azure Data Lake Store as /mydata/<blobName>.json, decompressing first when
// the blob's extension indicates a compressed file.
// Failed decompression is logged and rethrown so the runtime can retry /
// dead-letter the blob.
public static async Task CopyToDataLake(
    [BlobTrigger("insights-logs-webapplicationfirewalllogs/{blobName}.{blobExtension}", Connection = "WAFLogsStorage")] Stream inboundBlob,
    string blobName,
    string blobExtension,
    [DataLakeStore(AccountFQDN = "%fqdn%", ApplicationId = "%applicationid%", ClientSecret = "%clientsecret%", TenantID = "%tenantid%")] IAsyncCollector<DataLakeStoreOutput> asyncCollector,
    ILogger log)
{
    // Default to the raw blob; swap in a decompressed copy when needed.
    Stream outputStream = inboundBlob;

    if (IsCompressedFile(blobExtension))
    {
        try
        {
            var inflated = await Decompress(inboundBlob);
            // Rewind so the binding reads the decompressed data from the start.
            inflated.Seek(0, SeekOrigin.Begin);
            outputStream = inflated;
        }
        catch (Exception ex)
        {
            log.LogError("Failed to decompress blob:\n Name:{name}\n Reason:{reason}", blobName, ex.Message);
            throw;
        }
    }

    await asyncCollector.AddAsync(new DataLakeStoreOutput()
    {
        FileName = "/mydata/" + $"{blobName}.json",
        FileStream = outputStream
    });

    log.LogInformation($"Processed blob\n Name:{blobName} \n Size: {outputStream.Length} Bytes");
}