/// <summary>
/// HTTP-triggered entry point: deserializes a <see cref="PhotoToProcessDTO"/> from the
/// request body, runs face/emotion analysis, persists the photo when faces were found,
/// and returns the analysis result as JSON.
/// </summary>
/// <param name="req">Incoming HTTP request whose body is a JSON PhotoToProcessDTO.</param>
/// <param name="log">Function trace writer.</param>
/// <returns>200 with the serialized PhotoInfoDTO, or 400 with the exception message.</returns>
public static async Task<HttpResponseMessage> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequestMessage req,
    TraceWriter log)
{
    log.Info("C# HTTP trigger function processed a request.");
    HttpResponseMessage response;
    try
    {
        PhotoToProcessDTO photoToProcessDTO =
            JsonConvert.DeserializeObject<PhotoToProcessDTO>(await req.Content.ReadAsStringAsync());

        // Despite its name, ProcessPhotoAsync is synchronous (it blocks internally).
        PhotoInfoDTO photoInfo = ProcessPhotoAsync(
            photoToProcessDTO.PhotoAsByteArray,
            photoToProcessDTO.RecognizeEmotions,
            log);

        if (photoInfo.FoundAndProcessedFaces)
        {
            // BUG FIX: the Task returned by SaveToDatabase was previously discarded
            // (fire-and-forget), so save failures were silently swallowed and the
            // function could complete before the write finished. Await it so errors
            // surface in the catch below and the save is guaranteed to run to completion.
            await SaveToDatabase(photoToProcessDTO.PhotoAsByteArray, photoInfo, log);
        }

        var json = JsonConvert.SerializeObject(photoInfo);
        response = new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StringContent(json, Encoding.UTF8, "application/json")
        };
    }
    catch (Exception e)
    {
        // Any failure (malformed payload, downstream API error, save error) is
        // reported to the caller as a 400 with the message in a small JSON document.
        response = new HttpResponseMessage(HttpStatusCode.BadRequest)
        {
            Content = new StringContent(
                "{\"exception_message\": \"" + e.Message + "\"}",
                Encoding.UTF8,
                "application/json")
        };
    }
    return response;
}
/// <summary>
/// Persists a processed photo and its metadata under a freshly generated GUID:
/// the metadata goes to the "photosInfo" store and the raw bytes to "photos".
/// </summary>
/// <param name="photoAsByteArray">Raw photo bytes to store.</param>
/// <param name="photoInfoDTO">Analysis results whose Age/Emotion/Gender are persisted.</param>
/// <param name="log">Function trace writer, forwarded to the save helpers.</param>
private static async Task SaveToDatabase(byte[] photoAsByteArray, PhotoInfoDTO photoInfoDTO, TraceWriter log)
{
    string new_elem_guid = Guid.NewGuid().ToString();

    SingleFaceFaceAPIInfoDTO singleFaceFaceApiInfo = new SingleFaceFaceAPIInfoDTO
    {
        // No per-face rectangle is available here, so an empty one is stored.
        FaceRectangle = new Rectangle(0, 0, 0, 0),
        // BUG FIX (CA1305): Age is a machine-produced numeric string (presumably
        // dot-decimal from the Face API — confirm against AnalyzeFacesAsync);
        // parsing with the current culture misparses or throws on comma-decimal
        // locales, so parse culture-invariantly.
        Age = double.Parse(photoInfoDTO.Age, System.Globalization.CultureInfo.InvariantCulture),
        Emotion = photoInfoDTO.Emotion,
        Gender = photoInfoDTO.Gender
    };

    // NOTE(review): this method is declared 'async' but contains no await (CS1998).
    // If the two helpers below return Task they should be awaited — confirm their
    // signatures before changing; left as-is here since they are not visible.
    SavePhotoMetaToDatabase(new_elem_guid, singleFaceFaceApiInfo, "photosInfo", log);
    SavePhotoToDatabase(new_elem_guid, photoAsByteArray, "photos", log);
}
// BUG FIX: a new HttpClient was previously constructed (and never disposed) on
// every invocation — the classic socket-exhaustion anti-pattern. HttpClient is
// intended to be instantiated once and reused for the lifetime of the process.
private static readonly HttpClient s_sharedHttpClient = new HttpClient();

/// <summary>
/// Runs the emotion-recognition and face-analysis service calls in parallel and
/// aggregates their results into a single <see cref="PhotoInfoDTO"/>.
/// NOTE(review): despite the "Async" suffix this method is synchronous — it blocks
/// on Task.WaitAll. Renaming or making it truly async would break callers, so the
/// signature is kept unchanged.
/// </summary>
/// <param name="photoAsByteArray">Raw photo bytes sent to both cognitive APIs.</param>
/// <param name="recognizeEmotions">When false, the emotion API call is skipped.</param>
/// <param name="log">Function trace writer used to report per-API failures.</param>
/// <returns>
/// Aggregated age/emotion/gender/face-count info. A failure of either API is logged
/// and leaves the corresponding fields at their empty defaults rather than throwing.
/// </returns>
public static PhotoInfoDTO ProcessPhotoAsync(byte[] photoAsByteArray, bool recognizeEmotions, TraceWriter log)
{
    string emotion = "";
    FaceAPIInfoDTO faceAPIInfoDTO = new FaceAPIInfoDTO()
    {
        Age = "",
        FaceCountAsString = "",
        Gender = "",
        FoundAndProcessedFaces = false
    };
    List<SingleFaceFaceAPIInfoDTO> facesInfo = new List<SingleFaceFaceAPIInfoDTO>();

    Task[] tasks = new Task[2];

    // Emotion recognition — optional, failure is logged and 'emotion' stays "".
    tasks[0] = Task.Run(async () =>
    {
        if (recognizeEmotions)
        {
            try
            {
                emotion = (await RecognizeEmotionsAsync(s_sharedHttpClient, photoAsByteArray)).Item1;
            }
            catch (Exception e)
            {
                log.Error("Error when using emotions api! Exception message: " + e.Message);
            }
        }
    });

    // Face analysis — failure is logged and the DTO keeps its empty defaults.
    tasks[1] = Task.Run(async () =>
    {
        try
        {
            var analysisResult = await AnalyzeFacesAsync(s_sharedHttpClient, photoAsByteArray);
            faceAPIInfoDTO = analysisResult.Item1;
            facesInfo = analysisResult.Item2;
        }
        catch (Exception e)
        {
            log.Error("Error when using face api! Exception message: " + e.Message);
        }
    });

    // Block until both service calls have completed (exceptions are handled inside
    // the lambdas, so WaitAll will not throw AggregateException here).
    Task.WaitAll(tasks);

    CropAndSaveToDb(facesInfo, photoAsByteArray, log);

    PhotoInfoDTO photoInfo = new PhotoInfoDTO
    {
        Age = faceAPIInfoDTO.Age,
        Emotion = emotion,
        FaceCountAsString = faceAPIInfoDTO.FaceCountAsString,
        Gender = faceAPIInfoDTO.Gender,
        FoundAndProcessedFaces = faceAPIInfoDTO.FoundAndProcessedFaces
    };
    return photoInfo;
}