async Task<PhotoInfoDTO> ProcessPhotoAsync(byte[] photoAsByteArray, bool recognizeEmotions)
{
    Log.Trace($"BEFORE {GetType().Name}.ProcessPhotoAsync()", Log.LogFlag.Debug);

    // Wrap the photo bytes and the emotion flag into the request payload.
    PhotoToProcessDTO photoToProcessDTO = new PhotoToProcessDTO
    {
        PhotoAsByteArray = photoAsByteArray,
        RecognizeEmotions = recognizeEmotions
    };
    var json = JsonConvert.SerializeObject(photoToProcessDTO);

    PhotoInfoDTO photoInfoDTO;
    if (!offline)
    {
        using (StringContent content = new StringContent(json, Encoding.UTF8, "application/json"))
        {
            try
            {
                Log.Trace($"{GetType().Name}.ProcessPhotoAsync(): sent to network");
                HttpResponseMessage response = await httpClient.PostAsync(Config.CognitiveEndpoint, content);
                Log.Trace($"{GetType().Name}.ProcessPhotoAsync(): received a response from network", Log.LogFlag.Debug);

                if (response.StatusCode == HttpStatusCode.OK)
                {
                    photoInfoDTO = JsonConvert.DeserializeObject<PhotoInfoDTO>(await response.Content.ReadAsStringAsync());
                }
                else
                {
                    Log.Trace($"{GetType().Name}.ProcessPhotoAsync(): No faces found and analyzed", Log.LogFlag.Debug);
                    photoInfoDTO = new PhotoInfoDTO { FoundAndProcessedFaces = false };
                }
            }
            catch (Exception e)
            {
                Log.Trace($"{GetType().Name}.ProcessPhotoAsync(): Error! Exception message: " + e.Message, Log.LogFlag.Error);
                photoInfoDTO = new PhotoInfoDTO { FoundAndProcessedFaces = false };
            }
        }
    }
    else
    {
        // Offline mode: return placeholder values without calling the cognitive endpoint.
        photoInfoDTO = new PhotoInfoDTO
        {
            Age = "offline",
            Emotion = "offline",
            FaceCountAsString = "offline",
            Gender = "offline",
            FoundAndProcessedFaces = true
        };
    }

    Log.Trace($"AFTER {GetType().Name}.ProcessPhotoAsync()", Log.LogFlag.Debug);
    return photoInfoDTO;
}
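// The DTO shapes below are a minimal sketch inferred only from how ProcessPhotoAsync() and
// RecognizeFace() use them; the real project types may carry additional members or
// serialization attributes.
class PhotoToProcessDTO
{
    public byte[] PhotoAsByteArray { get; set; }
    public bool RecognizeEmotions { get; set; }
}

class PhotoInfoDTO
{
    public bool FoundAndProcessedFaces { get; set; }
    public string FaceCountAsString { get; set; }
    public string Age { get; set; }
    public string Gender { get; set; }
    public string Emotion { get; set; }
}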
async Task<bool> RecognizeFace()
{
    Log.Trace($"BEFORE {GetType().Name}.RecognizeFace()", Log.LogFlag.Debug);

    // Throttle recognition to roughly one request per second using a shared Unix-millisecond timestamp.
    long currMilliseconds = DateTimeOffset.Now.ToUnixTimeMilliseconds();
    if (currMilliseconds - 1000L < Interlocked.Exchange(ref lastRecognizingTimeStampMillis, currMilliseconds))
    {
        Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE will wait", Log.LogFlag.Debug);
        await Task.Delay(TimeSpan.FromMilliseconds(1000));
        Interlocked.Exchange(ref lastRecognizingTimeStampMillis, DateTimeOffset.Now.ToUnixTimeMilliseconds());
        Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER will wait", Log.LogFlag.Debug);
    }

    if (!IsFacePresent)
    {
        Log.Trace($"AFTER {GetType().Name}.RecognizeFace(): IsFacePresent='false'", Log.LogFlag.Debug);
        return false;
    }

    Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE check Event state", Log.LogFlag.Debug);
    if (RE.State.ContainsKey("Event") && RE.State["Event"] == "FacePreOut")
    {
        RE.SetVar("Event", "FaceIn");
    }
    Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER check Event state", Log.LogFlag.Debug);

    FaceWaitTimer.Stop();

    // Capture a JPEG photo from the media capture device into an in-memory stream,
    // then hand the raw bytes over to ProcessPhotoAsync().
    var photoAsStream = new MemoryStream();
    Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE capture a photo", Log.LogFlag.Debug);
    await MC.CapturePhotoToStreamAsync(ImageEncodingProperties.CreateJpeg(), photoAsStream.AsRandomAccessStream());
    Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER capture a photo, BEFORE serialized", Log.LogFlag.Debug);
    byte[] photoAsByteArray = photoAsStream.ToArray();
    Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER capture a photo, AFTER serialized", Log.LogFlag.Debug);

    Log.Trace($"IN {GetType().Name}.RecognizeFace() BEFORE ProcessPhotoAsync()", Log.LogFlag.Debug);
    PhotoInfoDTO photoInfo = await ProcessPhotoAsync(photoAsByteArray, Config.RecognizeEmotions);
    Log.Trace($"IN {GetType().Name}.RecognizeFace() AFTER ProcessPhotoAsync()", Log.LogFlag.Debug);

    if (photoInfo.FoundAndProcessedFaces)
    {
        if (photoInfo.Age == "offline")
        {
            // Offline mode: publish placeholder values into the rules engine state.
            RE.SetVar("FaceCount", "1");
            RE.SetVar("Gender", "offline");
            RE.SetVar("Age", "-1");
            RE.SetVar("Emotion", "offline");
            RE.SetVar("offline", "True");
        }
        else
        {
            RE.SetVar("FaceCount", photoInfo.FaceCountAsString);
            RE.SetVar("Gender", photoInfo.Gender);
            RE.SetVar("Age", photoInfo.Age);
            if (Config.RecognizeEmotions)
            {
                RE.SetVar("Emotion", photoInfo.Emotion);
            }
        }

        Log.Trace($"AFTER {GetType().Name}.RecognizeFace(): FaceCount='{RE.State.Eval("FaceCount")}', " +
                  $"Age='{RE.State.Eval("Age")}', Gender='{RE.State.Eval("Gender")}', Emotion='{RE.State.Eval("Emotion")}'",
                  Log.LogFlag.Debug);
        return true;
    }
    else
    {
        // No faces recognized: restart the wait timer and report failure.
        FaceWaitTimer.Start();
        Log.Trace($"AFTER {GetType().Name}.RecognizeFace(): FaceCount='0'", Log.LogFlag.Debug);
        return false;
    }
}
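// A minimal, self-contained sketch of the throttling pattern used at the top of RecognizeFace():
// an Interlocked.Exchange on a shared Unix-millisecond timestamp limits calls to roughly one per
// second. The ThrottleSketch/DoWorkAsync names are illustrative only and not part of the project.
using System;
using System.Threading;
using System.Threading.Tasks;

static class ThrottleSketch
{
    static long lastCallTimeStampMillis;

    public static async Task DoWorkAsync()
    {
        long now = DateTimeOffset.Now.ToUnixTimeMilliseconds();

        // If the previous call happened less than a second ago, wait before proceeding.
        if (now - 1000L < Interlocked.Exchange(ref lastCallTimeStampMillis, now))
        {
            await Task.Delay(TimeSpan.FromMilliseconds(1000));
            Interlocked.Exchange(ref lastCallTimeStampMillis, DateTimeOffset.Now.ToUnixTimeMilliseconds());
        }

        Console.WriteLine($"work at {DateTimeOffset.Now:HH:mm:ss.fff}");
    }

    public static async Task Main()
    {
        // Fire several calls back to back; the throttle spaces them roughly one second apart.
        for (int i = 0; i < 3; i++)
        {
            await DoWorkAsync();
        }
    }
}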