/// <summary>
/// Handles a face detection result while the screen is locked: after two
/// consecutive single-face detections, saves the captured image and sends
/// it to Slack. Alerts at most once per lock session (until _alertSended
/// is reset elsewhere).
/// </summary>
/// <param name="obj">Latest detection result, including the captured image.</param>
private void Process(FaceDetectionResult obj)
{
    // Only react while the screen is locked, and never alert twice in a row.
    if (!_config.ScreenLocked || _alertSended)
    {
        return;
    }

    // Require consecutive hits to filter out single-frame false positives.
    const int RequiredConsecutiveDetections = 2;

    if (obj.Result == FaceDetectionResultType.OneFaceFound)
    {
        _faceDetectedInRow++;
    }
    else
    {
        _faceDetectedInRow = 0;
    }

    if (_faceDetectedInRow < RequiredConsecutiveDetections)
    {
        return;
    }

    var filePath = _fileService.SaveImage(obj.Image);
    _slackClient.SlackSendFile(filePath);
    _alertSended = true;
}
/// <summary>
/// Counts consecutive "face not found" results and, once the configured limit
/// is reached while input devices have been idle, schedules a deferred screen
/// lock and warns the user.
/// </summary>
/// <param name="args">Latest face detection result.</param>
private void FaceDetectionResultHandler(FaceDetectionResult args)
{
    // Nothing to do once the screen is already locked.
    if (_applicationConfiguration.ScreenLocked)
    {
        return;
    }

    if (args.Result == FaceDetectionResultType.FaceNotFound)
    {
        _faceNotDetectedInRow++;
    }
    else
    {
        _faceNotDetectedInRow = 0;
    }

    if (_faceNotDetectedInRow < _applicationConfiguration.FaceNotDetectedLimit)
    {
        return;
    }

    // Sample the clock once so both inactivity checks compare the same instant.
    var now = DateTime.Now;
    if (now - _mouseService.LastDeviceActivityTime < _applicationConfiguration.DeviceInactivityLimit ||
        now - _keyboardService.LastDeviceActivityTime < _applicationConfiguration.DeviceInactivityLimit)
    {
        return;
    }

    _faceNotDetectedInRow = 0;
    NewThreadScheduler.Default.Schedule(
        _applicationConfiguration.CommandDelayInterval,
        () => _dispatcherService.Invoke(LockWorkstation));

    // Derive the countdown from configuration instead of a hard-coded "5":
    // the old message went stale whenever CommandDelayInterval was changed.
    _notificationService.SendMessage(
        $"Screen will be locked within {_applicationConfiguration.CommandDelayInterval.TotalSeconds:0} seconds. Press any key or move mouse");
}
/// <summary>
/// A result constructed with an error message must report itself as invalid
/// and surface that message through its Errors property.
/// </summary>
public void Invalid()
{
    var sut = new FaceDetectionResult("errors");

    Assert.False(sut.IsValid);
    Assert.Equal("errors", sut.Errors);
}
/// <summary>
/// Groups the tracking fragments that actually carry events into a dictionary
/// keyed by person id.
/// </summary>
/// <param name="faceDetectionTracking">Tracking result whose fragments are inspected.</param>
/// <returns>Events grouped per person id, built only from fragments with non-null Events.</returns>
private Dictionary<int, List<CoolEvent>> GetCoolEvents(FaceDetectionResult faceDetectionTracking)
{
    // Only fragments where Events are not null are relevant for grouping.
    var fragmentsWithEvents = faceDetectionTracking.Fragments
        .Where(x => x.Events != null)
        .ToArray();

    return GetDictionary(fragmentsWithEvents);
}
/// <summary>
/// When the detector reports more than one face, profile image validation
/// must fail with the "multiple faces" error message.
/// </summary>
public async Task ValidateProfileImage_MoreThan1Faces()
{
    // Arrange: the fake detector reports two faces for any image URL.
    var sut = Create();
    faceDetectionClientFake
        .Setup(f => f.DetectAsync(It.IsAny<string>(), false, false, null))
        .ReturnsAsync(new[] { new Face(), new Face() });
    var expected = new FaceDetectionResult("Fant flere ansikter i bildet litt. Last opp et profilbilde av deg selv.");

    // Act
    var actual = await sut.ValidateProfileImage(new SlackUser { Id = "id", Image = "http://image.jpg" });

    // Assert
    Assert.Equal(expected, actual);
}
/// <summary>
/// Verifies value-equality semantics: two Valid instances are equal, null and
/// unrelated objects are not, and an error result differs from a valid one.
/// </summary>
public void Equals()
{
    var first = FaceDetectionResult.Valid;
    var second = FaceDetectionResult.Valid;

    // Two valid results compare equal, both typed and via the object overload.
    Assert.Equal(first, second);
    Assert.True(first.Equals((object)second));

    // Null and foreign types never compare equal.
    Assert.False(first.Equals(null));
    Assert.False(first.Equals(new object()));

    // A result carrying errors differs from a valid result.
    second = new FaceDetectionResult("errors");
    Assert.NotEqual(first, second);
}
/// <summary>
/// Takes a photo with the device camera, shows it in the UI, runs face
/// detection on it, and binds the detection result to the page.
/// async void is acceptable here only because this is a top-level event handler.
/// </summary>
private async void TakePictureButton_Clicked(object sender, EventArgs e)
{
    try
    {
        // Initialize the media plugin and verify camera support first.
        await CrossMedia.Current.Initialize();

        if (!CrossMedia.Current.IsCameraAvailable || !CrossMedia.Current.IsTakePhotoSupported)
        {
            await DisplayAlert("No Camera", "No camera available.", "OK");
            return;
        }

        // Take the picture and save it to the camera roll.
        var file = await CrossMedia.Current.TakePhotoAsync(new StoreCameraMediaOptions
        {
            SaveToAlbum = true,
            Name = "test.jpg"
        });

        if (file == null)
        {
            return;
        }

        this.Indicator1.IsVisible = true;
        this.Indicator1.IsRunning = true;

        // ImageSource.FromStream needs a fresh stream each time it renders,
        // hence the factory lambda.
        Image1.Source = ImageSource.FromStream(() => file.GetStream());

        // Analyze the image and bind the result to the UI.
        FaceDetectionResult theData = await DetectFacesAsync(file.GetStream());
        this.BindingContext = theData;
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Error taking photo: " + ex);
        Result.Text = "Error taking photo...: " + ex;
    }
    finally
    {
        // Fix: previously the spinner kept running forever when
        // DetectFacesAsync threw; always reset it on every exit path.
        this.Indicator1.IsRunning = false;
        this.Indicator1.IsVisible = false;
    }
}
/// <summary>
/// Detects faces in the supplied image via the Azure Face API, identifies each
/// face against the configured person list, and returns one result per face.
/// Returns an empty sequence when the Face API call fails (best effort).
/// </summary>
/// <param name="request">Request carrying the raw image bytes to analyze.</param>
/// <returns>One <see cref="FaceDetectionResult"/> per detected face.</returns>
public async Task<IEnumerable<FaceDetectionResult>> DetectFacesAsync(ImageRequest request)
{
    try
    {
        // Client for the .NET Face API wrapper.
        var client = new FaceServiceClient(
            subscriptionKey: Secrets.CongnitiveServiceFaceApiKey,
            apiRoot: Consts.CognitiveServiceFaceApiEndPoint);

        // DetectAsync returns where each face is plus the requested attributes (age).
        var results = await client.DetectAsync(
            imageStream: new MemoryStream(request.Image),
            returnFaceAttributes: new[]
            {
                FaceAttributeType.Age,
            });

        var personListId = await this.PersonListIdRepository.GetIdAsync();

        // Identify whose face each detection belongs to, indexed by face id.
        var identifyResults = (await client.IdentifyAsync(personListId, results.Select(x => x.FaceId).ToArray()))
            .ToDictionary(x => x.FaceId);

        var detections = new List<FaceDetectionResult>();
        foreach (var face in results)
        {
            identifyResults.TryGetValue(face.FaceId, out IdentifyResult identifyResult);

            detections.Add(new FaceDetectionResult
            {
                // Fall back to the all-zero guid for unidentified faces.
                // (The original `new Guid()` yielded the same empty guid; if a
                // unique placeholder was intended, Guid.NewGuid() should be used.)
                FaceId = identifyResult?.Candidates.FirstOrDefault()?.PersonId.ToString()
                    ?? Guid.Empty.ToString(),
                Age = (int)face.FaceAttributes.Age,
                Rectangle = new BusinessObjects.FaceRectangle
                {
                    Top = face.FaceRectangle.Top,
                    Left = face.FaceRectangle.Left,
                    Width = face.FaceRectangle.Width,
                    Height = face.FaceRectangle.Height,
                }
            });
        }

        return detections;
    }
    catch (FaceAPIException)
    {
        // Best effort: treat any Face API failure as "no faces detected".
        return Enumerable.Empty<FaceDetectionResult>();
    }
}
/// <summary>
/// Lets the user pick an existing photo, shows it in the UI, runs face
/// detection on it, and binds the detection result to the page.
/// async void is acceptable here only because this is a top-level event handler.
/// </summary>
private async void UploadPictureButton_Clicked(object sender, EventArgs e)
{
    try
    {
        // Verify photo picking is supported on this platform.
        if (!CrossMedia.Current.IsPickPhotoSupported)
        {
            await DisplayAlert("No upload", "Picking a photo is not supported.", "OK");
            return;
        }

        // Get a reference to the selected image file.
        var file = await CrossMedia.Current.PickPhotoAsync();
        if (file == null)
        {
            return;
        }

        this.Indicator1.IsVisible = true;
        this.Indicator1.IsRunning = true;

        // ImageSource.FromStream needs a fresh stream each time it renders,
        // hence the factory lambda.
        Image1.Source = ImageSource.FromStream(() => file.GetStream());

        // Analyze the image and bind the result to the UI.
        FaceDetectionResult theData = await DetectFacesAsync(file.GetStream());
        this.BindingContext = theData;
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Error in upload picture: " + ex);
        Result.Text = "Error in upload picture...: " + ex;
    }
    finally
    {
        // Fix: previously the spinner kept running forever when
        // DetectFacesAsync threw; always reset it on every exit path.
        this.Indicator1.IsRunning = false;
        this.Indicator1.IsVisible = false;
    }
}
/// <summary>
/// Invokes the native face-detection DLL on the given image file and logs
/// every detected face rectangle, then clears the detection-in-progress flag.
/// </summary>
/// <param name="fileName">Path of the image file to scan.</param>
private void CallDLL(string fileName)
{
    // The native Detect entry point fills at most 32 rectangles per call.
    const int MaxFaces = 32;
    int[] top = new int[MaxFaces];
    int[] left = new int[MaxFaces];
    int[] right = new int[MaxFaces];
    int[] bottom = new int[MaxFaces];

    var size = Detect(fileName, top, left, right, bottom);
    Debug.LogFormat("Detect: {0} faces.", size);

    for (int i = 0; i < size; i++)
    {
        // Fix: the original accumulated results into a local List that was
        // never read; only the per-face log output is observable behavior.
        var rectangle = new FaceDetectionResult(top[i], left[i], right[i], bottom[i]);
        Debug.LogFormat("Face[{0}]: \n{1}", i, rectangle);
    }

    isDetectingFaces = false;
}
/// <summary>
/// Extracts cropped face images from a video: runs face detection over the
/// video, picks the first plus a few random events per detected person, grabs
/// the matching frames to TempData, and crops each face out of its frame.
/// </summary>
/// <param name="path">Path to the video file.</param>
/// <returns>Cropped face images keyed by each person's unique id.</returns>
public async Task <Dictionary <int, List <Image> > > GetFacesFromVideo(string path)
{
    SetVideoResol(path);
    FaceDetectionResult faceDetectionResult = await GetFaceDetectionAsync(path);
    MessageManager.MsgManagerInstance.ReportProgress();
    MessageManager.MsgManagerInstance.WriteMessage("Got Face Detection Result!!!!)))");

    // Events grouped per person id; id is the unique person's number.
    Dictionary <int, List <CoolEvent> > FaceIds = GetCoolEvents(faceDetectionResult);
    Dictionary <int, List <Image> > resultImages = new Dictionary <int, List <Image> >();

    //Choose 1 first and 5 random CoolEvents
    FaceIds = ChooseFive(FaceIds);

    //Cropping faces of each person. id - unique person's number
    foreach (int id in FaceIds.Keys)
    {
        resultImages[id] = new List <Image>();
        foreach (var curEvent in FaceIds[id])
        {
            try
            {
                // Convert the event's start time into milliseconds; _timeScale
                // presumably maps detection ticks to seconds — TODO confirm units.
                var startTimeMili = curEvent.startTime / _timeScale * 1000;
                // Extract the frame at that timestamp into TempData/<id>.<ms>.png,
                // then crop the face rectangle out of it.
                GetFrame(path, startTimeMili, id);
                var img = ImageProcessing.ImageProcessingInstance.LoadImageFromFile($@"TempData/{id}.{(long)startTimeMili}.png");
                img = ImageProcessing.ImageProcessingInstance.CropImage(img, curEvent.rec);
                //ImageProcessing.ImageProcessingInstance.SaveImageToFile($@"TempData/{id}.{(long)startTimeMili}Face", img, System.Drawing.Imaging.ImageFormat.Png);
                resultImages[id].Add(img);
            }
            catch
            {
                // Best effort: a failed frame grab or crop skips this event only.
                // NOTE(review): the bare catch hides the actual error; consider
                // logging the exception detail alongside this message.
                MessageManager.MsgManagerInstance.WriteMessage("Error with cropping");
            }
        }
    }
    MessageManager.MsgManagerInstance.ReportProgress();
    return(resultImages);
}
/// <summary>
/// Counts consecutive "face not found" results and, once the configured limit
/// is reached while mouse and keyboard have been idle, locks the workstation
/// immediately (unlike the scheduling variant, no countdown is given).
/// </summary>
/// <param name="args">Latest face detection result.</param>
private void FaceDetectionResultHandler(FaceDetectionResult args)
{
    Debug.WriteLine(args.ToString());

    // NOTE(review): relies on == against the FaceNotFound instance; verify
    // FaceDetectionResult overloads == for value equality, otherwise this is
    // reference comparison and may never match.
    if (args == FaceDetectionResult.FaceNotFound)
    {
        _faceNotDetectedInRow++;
    }
    else
    {
        _faceNotDetectedInRow = 0;
    }

    if (_faceNotDetectedInRow < _applicationConfiguration.FaceNotDetectedLimit)
    {
        return;
    }

    // Sample the clock once so both inactivity checks compare the same instant.
    var now = DateTime.Now;
    if (now - _mouseService.LastDeviceActivityTime < _applicationConfiguration.DeviceInactivityLimit ||
        now - _keyboardService.LastDeviceActivityTime < _applicationConfiguration.DeviceInactivityLimit)
    {
        return;
    }

    LockWorkstation();
    _faceNotDetectedInRow = 0;
}
/// <summary>
/// Detects a face in the given image stream via the Azure Face API, and when
/// the subject wears no glasses, searches a persisted face list for a similar
/// face and reports a match in the UI when confidence is high enough.
/// </summary>
/// <param name="image">Image stream to analyze.</param>
/// <returns>Detection data (face id, age, glasses) for the first detected face.</returns>
private async Task <FaceDetectionResult> DetectFacesAsync(Stream image)
{
    Result.Text = "";
    // NOTE(review): API subscription key is hard-coded in source — move it to
    // secure configuration; it is also committed history now and should be rotated.
    FaceServiceClient faceService = new FaceServiceClient("2bddec152651472a8cb690e00db31a43");
    FaceDetectionResult faceDetectionResult = new FaceDetectionResult();
    var requiredFaceAttributes = new FaceAttributeType[]
    {
        FaceAttributeType.Age,
        FaceAttributeType.Gender,
        FaceAttributeType.Glasses
    };
    Face[] faces = await faceService.DetectAsync(image, returnFaceLandmarks : true, returnFaceAttributes : requiredFaceAttributes);
    if (faces.Length >= 1)
    {
        // Only the first detected face is considered; additional faces are ignored.
        var edad = faces[0].FaceAttributes.Age;
        var genero = faces[0].FaceAttributes.Gender;
        int roundedAge = (int)Math.Round(edad);
        faceDetectionResult.FaceId = faces[0].FaceId.ToString();
        faceDetectionResult.Age = faces[0].FaceAttributes.Age;
        faceDetectionResult.Glasses = faces[0].FaceAttributes.Glasses.ToString();
        Debug.WriteLine("ID de rostro: " + faces[0].FaceId);
        Debug.WriteLine("Edad: " + edad);
        Debug.WriteLine("Género: " + genero);
        Debug.WriteLine("Lentes: " + faces[0].FaceAttributes.Glasses);
        // Similarity search only works reliably without glasses.
        if (faceDetectionResult.Glasses == "NoGlasses")
        {
            // NOTE(review): FaceId already appears to be a Guid, making this
            // ToString/Parse round trip redundant — confirm the SDK type.
            Guid idGuid = Guid.Parse(faces[0].FaceId.ToString());
            // "21122011" is the persisted face list id; 1 = max candidates returned.
            SimilarPersistedFace[] facescomp = await faceService.FindSimilarAsync(idGuid, "21122011", 1);
            // NOTE(review): facescomp[0] is unguarded — FindSimilarAsync can
            // return an empty array, which would throw IndexOutOfRangeException.
            // NOTE(review): Double.Parse(x.ToString()) is culture-sensitive and
            // redundant if Confidence is already numeric — verify and simplify.
            double conf = Double.Parse(facescomp[0].Confidence.ToString());
            string pid = facescomp[0].PersistedFaceId.ToString();
            Debug.WriteLine("conf: " + conf);
            // .67 is the confidence threshold for declaring a possible match.
            if (conf >= .67)
            {
                Result.Text = "Posible coincidencia";
                try
                {
                    Query(pid);
                }
                catch (Exception ex)
                {
                    Debug.WriteLine(" ex: " + ex);
                }
            }
            else
            {
                Result.Text = "No hay coincidencias";
            }
        }
        else
        {
            Result.Text = "Try again without glasses!";
        }
    }
    else
    {
        Debug.WriteLine("No faces detected: {0} ", faces.Length);
        Result.Text = faces.Length + " faces detected";
    }
    return(faceDetectionResult);
}