/// <summary>
/// HTTP GET endpoint ("DetectsCheck/{imageName}") reporting the processing state of an image.
/// The runtime binds <paramref name="detect"/> from the "detects" table
/// (partition key " ", row key = image name); a missing row binds as null.
/// </summary>
/// <param name="req">Incoming HTTP request message.</param>
/// <param name="imageName">Image name from the route, used as the table row key.</param>
/// <param name="detect">Detect-state entity, or null when the image has not been processed yet.</param>
/// <param name="log">Function trace writer (unused here, required by the binding signature).</param>
/// <returns>
/// 200 OK with <c>DetectEntity.DetectState.NotProcessed</c> when no row exists,
/// otherwise 200 OK with the entity's <c>Faces</c> value.
/// </returns>
public static async Task<HttpResponseMessage> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = "DetectsCheck/{imageName}")] HttpRequestMessage req,
    string imageName,
    [Table("detects", " ", "{imageName}", Connection = "AzureWebJobsStorage")] DetectEntity detect,
    TraceWriter log)
{
    // No table row yet -> the blob-triggered processor has not handled this image.
    return detect == null
        ? req.CreateResponse(HttpStatusCode.OK, DetectEntity.DetectState.NotProcessed)
        : req.CreateResponse(HttpStatusCode.OK, detect.Faces);
}
/// <summary>
/// Blob-triggered pipeline for each uploaded photo ("photos/{name}"): detects the first face
/// via the Face API, optionally runs Custom Vision tag prediction, posts the combined result
/// to a front-end API, then emits the result document to DocumentDB and the detection state
/// to the "detects" table.
/// </summary>
/// <param name="photo">Uploaded blob content stream.</param>
/// <param name="name">Blob name; used as the detect-state entity key.</param>
/// <param name="document">DocumentDB output; null when the image is empty or no face is found.</param>
/// <param name="detectCheck">Table output: DetectEntity(name, 0) on failure, (name, 1) on success.</param>
/// <param name="log">Function trace writer.</param>
public static void Run(
    [BlobTrigger("photos/{name}", Connection = "AzureWebJobsStorage")] Stream photo,
    string name,
    [DocumentDB("SignalData", "Faces", ConnectionStringSetting = "CosmosDbConnection")] out dynamic document,
    [Table("detects", Connection = "AzureWebJobsStorage")] out DetectEntity detectCheck,
    TraceWriter log)
{
    log.Info($"Processing:{name} \n Size: {photo.Length} Bytes");

    var result = new DetectionResult()
    {
        DateTime = DateTime.Now, // NOTE(review): consider DateTime.UtcNow — confirm downstream consumers expect local time
        Source = DetectionResult.SOURCE_PRODUCTION,
        Image = Utils.EncodeStreamToBase64(photo), // assumes Utils rewinds 'photo' after reading it — TODO confirm
        Tags = new List<Tag>()
    };

    // Buffer the blob so it can be re-read; the source stream is only good for one pass.
    MemoryStream detectPhotoStream = new MemoryStream();
    photo.CopyTo(detectPhotoStream);
    detectPhotoStream.Seek(0, SeekOrigin.Begin);

    if (detectPhotoStream.Length == 0)
    {
        log.Error("Image size 0.");
        document = null;
        detectCheck = new DetectEntity(name, 0);
        return;
    }

    // The out-parameters rule out an async signature, so the task is awaited synchronously here.
    Face firstFace = GetFirstFaceAsync(detectPhotoStream, log).Result;
    if (firstFace == null)
    {
        document = null;
        detectCheck = new DetectEntity(name, 0);
        return;
    }
    detectCheck = new DetectEntity(name, 1);

    result.Tags.Add(new Tag("Age", firstFace.FaceAttributes.Age));
    result.Tags.Add(new Tag("Gender", firstFace.FaceAttributes.Gender));
    result.Tags.Add(new Tag("Smile", firstFace.FaceAttributes.Smile, firstFace.FaceAttributes.Smile));
    int glasses = firstFace.FaceAttributes.Glasses > 0 ? 1 : 0;
    result.Tags.Add(new Tag("Glasses", glasses, glasses));
    Tuple<string, double> facialHair = GetTopFacialHair(firstFace.FaceAttributes.FacialHair);
    result.Tags.Add(new Tag("FacialHair", facialHair.Item1, facialHair.Item2));
    result.FaceRectangle = firstFace.FaceRectangle;

    // FIX: null-safe, culture-independent comparison — the original
    // GetEnvironmentVariable(...).ToLower() threw NullReferenceException when the setting was absent.
    if (string.Equals(
            Environment.GetEnvironmentVariable("UseTagRecognition", EnvironmentVariableTarget.Process),
            bool.TrueString,
            StringComparison.OrdinalIgnoreCase))
    {
        // Send image to Custom Vision.
        // NOTE(review): prefer one shared client instance over per-invocation construction.
        var predictionClient = new CustomVisionClient(
            Environment.GetEnvironmentVariable("PredictionKey"),
            Environment.GetEnvironmentVariable("PredictionEndpoint"));

        // BUGFIX: 'photo' was already read to its end by CopyTo above, so PredictAsync(photo)
        // received an exhausted stream. Rewind the buffered copy and send that instead.
        detectPhotoStream.Seek(0, SeekOrigin.Begin);
        PredictionResult predRes = predictionClient.PredictAsync(detectPhotoStream).Result;
        if (predRes == null)
        {
            log.Info("Prediction result empty.");
        }
        else
        {
            // Results coming from the API are ordered by probability — keep the top 4.
            var firstResults = predRes.Predictions.Take(4);
            foreach (var pr in firstResults)
            {
                // presumably every predicted tag maps to exactly one category;
                // an unmatched tag yields a default KeyValuePair (null Key) — verify against CategoryMapping
                string category = CategoryMapping.Where(c => c.Value.Contains(pr.Tag)).FirstOrDefault().Key;
                result.Tags.Add(new Tag(category, pr.Tag, pr.Probability));
            }
        }
    }

    // Send the serialized result to the front-end API.
    // NOTE(review): HttpClient is intended to be reused; a static instance would avoid socket churn.
    using (var hc = new HttpClient())
    {
        var res = hc.PostAsync(
            Environment.GetEnvironmentVariable("SaveApiEndpoint"),
            new StringContent(JsonConvert.SerializeObject(result), System.Text.Encoding.UTF8, "application/json")).Result;
        if (res.IsSuccessStatusCode)
        {
            log.Info("Data sent to API.");
        }
        else
        {
            log.Error("Unable to send data to API. (" + res.Content.ReadAsStringAsync().Result + ")");
        }
    }

    // Save to DocumentDB — the image itself is not stored.
    result.Image = null;
    document = result;
    log.Info(JsonConvert.SerializeObject(result.Tags, Formatting.Indented));
}