/// <summary>
/// Blob-triggered function: detects a face in a photo uploaded to the "photos"
/// container, draws a red bounding box around it, and writes the annotated image
/// to the "thumb" container under the same name.
/// </summary>
/// <param name="inputBlob">Stream of the uploaded photo (assumed seekable by the binding).</param>
/// <param name="name">Blob name; its extension selects the output image format.</param>
/// <param name="outBlob">Writable stream bound to "thumb/{name}".</param>
/// <param name="log">Function logger.</param>
public async Task Run(
    [BlobTrigger("photos/{name}", Connection = "face_blob")] Stream inputBlob,
    string name,
    [Blob("thumb/{name}", FileAccess.Write)] Stream outBlob,
    ILogger log)
{
    log.LogInformation("C# Blob trigger function Processed blob Name {name} and Size of {size} Bytes", name, inputBlob.Length);

    using (MemoryStream memStream = new MemoryStream())
    {
        // Buffer the blob so the face API gets a fresh seekable stream, then
        // rewind both streams so the photo can be re-read for annotation.
        await inputBlob.CopyToAsync(memStream, 1024);
        inputBlob.Seek(0, SeekOrigin.Begin);
        memStream.Seek(0, SeekOrigin.Begin);

        log.LogInformation("Calling face detection api");
        var result = await _faceApp.DetectFaceExtract(memStream);

        using (var image = Image.FromStream(inputBlob))
        // FIX: Graphics and Pen wrap native GDI+ handles and were never disposed —
        // leak under repeated invocations. Stack the using declarations instead.
        using (Graphics graph = Graphics.FromImage(image))
        using (Pen pen = new Pen(Brushes.Red, 1.7f))
        {
            graph.DrawRectangle(pen, new Rectangle(result.Left, result.Top, result.Width, result.Height));

            // FIX: use the injected 'log' consistently (this one line used the
            // '_logger' field while the rest of the method used 'log').
            log.LogInformation("found face like left {left} top {top} width {width} and height {height}", result.Left, result.Top, result.Width, result.Height);

            using (MemoryStream stream = new MemoryStream())
            {
                // FIX: FileInfo.Extension always includes the leading dot, so the
                // old 'case "png":' was dead code, and the comparison was
                // case-sensitive (".PNG" fell through to JPEG). Compare
                // case-insensitively against ".png" instead.
                FileInfo fInfo = new FileInfo(name);
                if (string.Equals(fInfo.Extension, ".png", StringComparison.OrdinalIgnoreCase))
                {
                    image.Save(stream, System.Drawing.Imaging.ImageFormat.Png);
                }
                else
                {
                    image.Save(stream, System.Drawing.Imaging.ImageFormat.Jpeg);
                }

                stream.Seek(0, SeekOrigin.Begin);
                await stream.CopyToAsync(outBlob);
            }
        }
    }
}
/// <summary>
/// HTTP-triggered function: reads a JSON body containing a "baseUri" image URL,
/// runs face detection against it, and returns the detection result as JSON.
/// </summary>
/// <param name="req">Incoming POST request; body must be JSON with a "baseUri" property.</param>
/// <param name="log">Function logger.</param>
/// <returns>200 OK with the serialized detection result, or 400 if "baseUri" is missing.</returns>
public async Task<HttpResponseMessage> Run(
    [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)] HttpRequestMessage req,
    ILogger log)
{
    log.LogInformation("C# HTTP trigger function processed a request.");

    string requestBody = await req.Content.ReadAsStringAsync();
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    string baseUri = data?.baseUri;

    // FIX: previously a missing or empty "baseUri" flowed straight into the face
    // API and failed downstream. Reject malformed requests up front with a 400.
    if (string.IsNullOrWhiteSpace(baseUri))
    {
        return new HttpResponseMessage(HttpStatusCode.BadRequest)
        {
            Content = new StringContent("Request body must contain a non-empty 'baseUri' property.", Encoding.UTF8, "text/plain")
        };
    }

    var result = await _faceApp.DetectFaceExtract(baseUri);

    return new HttpResponseMessage(HttpStatusCode.OK)
    {
        Content = new StringContent(JsonConvert.SerializeObject(result), Encoding.UTF8, "application/json")
    };
}
/// <summary>
/// Blob-triggered function: detects a face in a photo uploaded to the "photos"
/// container, draws the detected bounding box on the photo, and uploads the
/// annotated PNG via <c>CreateBlockBlobAsync</c>.
/// </summary>
/// <param name="inputBlob">Stream of the uploaded photo (assumed seekable by the binding).</param>
/// <param name="name">Blob name, forwarded to the upload helper.</param>
/// <param name="log">Function logger.</param>
public async Task Run(
    [BlobTrigger("photos/{name}", Connection = "face_blob")] Stream inputBlob,
    string name,
    ILogger log)
{
    log.LogInformation($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {inputBlob.Length} Bytes");
    log.LogInformation("Calling face detection api");

    using (MemoryStream memStream = new MemoryStream())
    {
        // Buffer the blob so the face API gets a fresh seekable stream, then
        // rewind both streams so the photo can be re-read for annotation.
        await inputBlob.CopyToAsync(memStream, 1024);
        inputBlob.Seek(0, SeekOrigin.Begin);
        memStream.Seek(0, SeekOrigin.Begin);

        var result = await _faceApp.DetectFaceExtract(memStream);

        using (var image = Image.FromStream(inputBlob))
        // FIX: Graphics and Pen wrap native GDI+ handles and were never disposed.
        using (Graphics graph = Graphics.FromImage(image))
        using (Pen pen = new Pen(Brushes.Black))
        {
            // FIX: the old code called graph.Clear(Color.Azure), which erased the
            // loaded photo before drawing; it then rendered the detected face as a
            // meaningless two-point diagonal line while the rectangle it actually
            // drew was hard-coded to (100, 100, 300, 300). Draw the detected face
            // bounding box on the original photo instead, consistent with the
            // sibling thumbnail function.
            graph.DrawRectangle(pen, new Rectangle(result.Left, result.Top, result.Width, result.Height));

            using (MemoryStream memStreamThumb = new MemoryStream())
            {
                image.Save(memStreamThumb, System.Drawing.Imaging.ImageFormat.Png);
                memStreamThumb.Seek(0, SeekOrigin.Begin);
                await CreateBlockBlobAsync(name, memStreamThumb);
            }
        }
    }
}