/// <summary> Function which submits a frame to the Face API and identifies the faces. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces returned by the API. </returns>
        private async Task <LiveCameraResult> FacesAnalysisFunction(VideoFrame frame)
        {
            // Encode image as JPEG for transmission to the service.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            // Attributes we ask the detection endpoint to compute for each face.
            var attrs = new List <FaceAPI.FaceAttributeType> {
                FaceAPI.FaceAttributeType.Age,
                FaceAPI.FaceAttributeType.Gender,
                FaceAPI.FaceAttributeType.HeadPose
            };

            // Count the detection call.
            Properties.Settings.Default.FaceAPICallCount++;
            var faces = await _faceClient.DetectAsync(jpg, returnFaceAttributes : attrs);

            // Remember how many rendering entries existed before this frame, so the
            // identification loop below only touches the entries added for THIS frame
            // (the original indexed TargetFaces from 0 and broke if it was non-empty).
            int firstFaceIndex = TargetFaces.Count;

            var imageInfo = new Tuple <int, int>(frame.Image.Width, frame.Image.Height);
            foreach (var face in UIHelper.CalculateFaceRectangleForRendering(faces, MaxImageSize, imageInfo))
            {
                TargetFaces.Add(face);
            }

            // Identify with an empty face list is a service error, so only call the
            // identification endpoint when at least one face was detected.
            if (faces.Length > 0)
            {
                var identifyResult = await _faceClient.IdentifyAsync(
                    faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId : this.GroupId);

                // Count the identification call.
                Properties.Settings.Default.FaceAPICallCount++;

                for (int idx = 0; idx < faces.Length; idx++)
                {
                    // Update identification result for rendering.
                    var face = TargetFaces[firstFaceIndex + idx];
                    var res  = identifyResult[idx];

                    // Take the top candidate (if any) and resolve it to a known person
                    // with a single lookup instead of Any() + Where().First().
                    var person = res.Candidates.Length > 0
                        ? Persons.FirstOrDefault(p => p.PersonId == res.Candidates[0].PersonId.ToString())
                        : null;
                    if (person != null)
                    {
                        face.PersonName = person.PersonName;
                        face.FaceId     = res.Candidates[0].PersonId.ToString();
                    }
                    else
                    {
                        face.PersonName = "Unknown";
                    }
                }
            }

            // Output.
            return(new LiveCameraResult {
                Faces = faces, TargetFaces = TargetFaces
            });
        }
// Esempio n. 2 (scraped example-site separator; not code)
// 0
        /// <summary> Function which submits a frame to the Face API and identifies the faces. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces and identification results returned by the API. </returns>
        private async Task <LiveCameraResult> FacesIdentifyAnalysisFunctionWithClient(VideoFrame frame)
        {
            // Encode the image as JPEG.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            // Submit the image to the detection API.
            var faces = await _faceClient.DetectAsync(jpg);

            // Count the API call.
            Properties.Settings.Default.FaceAPICallCount++;

            // Calling Identify with an empty face list is a service error, so when no
            // faces were detected return early without identification results.
            if (faces.Length == 0)
            {
                return(new LiveCameraResult {
                    Faces = faces
                });
            }

            // Identify the detected faces; the result contains information about the
            // matched person(s) in the configured large person group.
            var identifyResult = await _faceClient.IdentifyAsync
                                     (faces.Select(ff => ff.FaceId).ToArray(), largePersonGroupId : this.GroupId);

            // Return the result.
            return(new LiveCameraResult {
                Faces = faces, IdentifyResults = identifyResult
            });
        }
// Esempio n. 3 (scraped example-site separator; not code)
// 0
        /// <summary> Detects the face in the frame and identifies it against the
        ///     "igniateam" person group. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> containing the detected faces and
        ///     the number of identification candidates found for the first face. </returns>
        private async Task <LiveCameraResult> RecognizePerson(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            // Getting Face ID for the person in the camera.
            var faces = await _faceClient.DetectAsync(jpg, true, false, null);

            // Count the API call.
            Properties.Settings.Default.FaceAPICallCount++;

            // No face detected: report zero candidates instead of indexing into an
            // empty array (the original threw IndexOutOfRangeException here).
            if (faces.Length == 0)
            {
                return(new LiveCameraResult {
                    Faces = faces, KnownPerson = 0
                });
            }

            Guid[] faceid = new Guid[] { faces[0].FaceId };

            // Identify the person against the "igniateam" group with a 0.5 confidence
            // threshold, returning at most one candidate.
            var faceidentified = await _faceClient.IdentifyAsync("igniateam", faceid, 0.5f, 1);

            // Number of similar candidates found (0 or 1, given the max of 1 above).
            var similarcandidates = faceidentified[0].Candidates.Length;

            // Output.
            return(new LiveCameraResult {
                Faces = faces, KnownPerson = similarcandidates
            });
        }
// Esempio n. 4 (scraped example-site separator; not code)
// 0
        /// <summary> Detects faces in the frame and identifies them against the person
        ///     group, returning the first identified person's name. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> containing the detected faces and,
        ///     when a face was identified, the matched person's name. </returns>
        private async Task <LiveCameraResult> ComparisonFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            var faceDetect = await _faceClient.DetectAsync(jpg, true);

            // Count the API call. Incremented before the identification loop so the
            // identified-person early return below no longer skips it (the original
            // only counted frames where nobody was identified).
            Properties.Settings.Default.FaceAPICallCount++;

            var faceIds = faceDetect.Select(face => face.FaceId).ToArray();

            // Identify with an empty face list is a service error; skip the call.
            if (faceIds.Length > 0)
            {
                var results = await _faceClient.IdentifyAsync(_personGroupId, faceIds);

                foreach (var identifyResult in results)
                {
                    // No candidates for this face: keep looking at the remaining faces.
                    if (identifyResult.Candidates.Length == 0)
                    {
                        continue;
                    }

                    // Take the top candidate and resolve it to a person record.
                    var candidateId = identifyResult.Candidates[0].PersonId;
                    var person      = await _faceClient.GetPersonAsync(_personGroupId, candidateId);

                    Console.WriteLine($"Identified as {person.Name}");
                    return(new LiveCameraResult
                    {
                        Faces = faceDetect,
                        PersonName = person.Name
                    });
                }
            }

            // Output: no face identified.
            return(new LiveCameraResult {
                Faces = faceDetect
            });
        }
// Esempio n. 5 (scraped example-site separator; not code)
// 0
        /// <summary> Detects faces in the frame and identifies them against the configured
        ///     person group, producing one description string per identified face. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveAnalyzeResult}"/> whose FaceIdentifyResult holds a
        ///     message for each face that was identified. </returns>
        private async Task <LiveAnalyzeResult> FacesAnalysisFunction(VideoFrame frame)
        {
            // Encode the frame and request age/gender/head-pose attributes.
            var jpg   = frame.Image.ToMemoryStream(".jpg", s_jpegParams);
            var attrs = new List <FaceAPI.FaceAttributeType> {
                FaceAPI.FaceAttributeType.Age,
                FaceAPI.FaceAttributeType.Gender,
                FaceAPI.FaceAttributeType.HeadPose
            };

            var faces = await _faceClient.DetectAsync(jpg, returnFaceAttributes : attrs);

            var resultList = new List <string>();
            var faceIds    = faces.Select(face => face.FaceId).ToArray();

            // Identify with an empty face list is a service error; skip the call.
            if (faceIds.Length > 0)
            {
                var identifyRes = await _faceClient.IdentifyAsync(SolutionConstant.personGroupId, faceIds);

                foreach (var identifyResult in identifyRes)
                {
                    if (identifyResult.Candidates.Length > 0)
                    {
                        // Get top 1 among all candidates returned, the highest scored candidate
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person      = await _faceClient.GetPersonAsync(SolutionConstant.personGroupId, candidateId);

                        var result = $"{identifyResult.FaceId} is identified as '{person.Name}' in {SolutionConstant.personGroupId} person group!";

                        resultList.Add(result);
                    }
                }
            }

            return(new LiveAnalyzeResult()
            {
                FaceIdentifyResult = resultList.ToArray()
            });
        }
// Esempio n. 6 (scraped example-site separator; not code)
// 0
        /// <summary> Function which submits a frame to the Face API, identifies the faces,
        ///     and locks the workstation when an identified person lacks access rights. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> representing the asynchronous API call,
        ///     and containing the faces and person names returned by the API. </returns>
        private async Task <LiveCameraResult> AuthorizedFacesFunction(VideoFrame frame)
        {
            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            // Submit image to API.
            Face[] faces = null;

            var personNames = new List <string>();
            // Local (OpenCV) face detections attached to this frame, if any.
            var localFaces = (OpenCvSharp.Rect[])frame.UserData;

            // Call the service when there is no local detection data (null) or the
            // local detector found at least one face.
            if (localFaces == null || localFaces.Length > 0)
            {
                Console.WriteLine("OpenCVSharp found faces and will submit to API");
                // Count the API call.
                Properties.Settings.Default.FaceAPICallCount++;
                faces = await _faceClient.DetectAsync(jpg);

                var faceIds = faces.Select(face => face.FaceId).ToArray();
                // Identify with an empty face list is a service error; skip the call.
                if (faceIds.Length > 0)
                {
                    var results = await _faceClient.IdentifyAsync(personGroupId, faceIds);

                    foreach (var identifyResult in results)
                    {
                        Console.WriteLine("Result of face: {0}", identifyResult.FaceId);
                        if (identifyResult.Candidates.Length == 0)
                        {
                            // No match in the person group: flag the face.
                            personNames.Add("Suspicious");
                        }
                        else
                        {
                            // Get top 1 among all candidates returned
                            var candidateId = identifyResult.Candidates[0].PersonId;
                            var person      = await _faceClient.GetPersonAsync(personGroupId, candidateId);

                            Console.WriteLine("Identified as {0}", person.Name);
                            personNames.Add(person.Name);

                            // For "Aaron", verify ShareFile access rights and lock the
                            // workstation when the confidential folder is off limits.
                            if (person.Name.Contains("Aaron"))
                            {
                                OAuth2Token token     = ShareFileV3Sample.Authenticate(hostname, clientId, clientSecret, username, password);
                                bool        isAllowed = ShareFileV3Sample.GetAccessControlPrincipal(token, AaronPrincipalID, ConfidentialFolderID);
                                if (!isAllowed)
                                {
                                    Security.LockWorkStation();
                                }
                            }
                        }
                    }
                }
            }
            else
            {
                Console.WriteLine("Local face detection found no faces; don't call Cognitive Services");
                // Local face detection found no faces; don't call Cognitive Services.
                faces = new Face[0];
            }
            // Output.
            return(new LiveCameraResult
            {
                Faces = faces.Select(e => CreateFace(e.FaceRectangle)).ToArray(), PersonNames = personNames
            });
        }
        /// <summary> Detects and identifies faces against the "residents" person group,
        ///     updates the visitor/resident UI, and sends an Apple push notification. </summary>
        /// <param name="frame"> The video frame to submit. </param>
        /// <returns> A <see cref="Task{LiveCameraResult}"/> containing the detected faces and,
        ///     when the known resident was identified, a highlight color. </returns>
        private async Task <LiveCameraResult> AnalysisFunction(VideoFrame frame)
        {
            // Reset data (placeholder for UI-state reset on the dispatcher thread).
            await Dispatcher.BeginInvoke((Action)(() =>
            {
            }));

            // Encode image.
            var jpg = frame.Image.ToMemoryStream(".jpg", s_jpegParams);

            var faces = await _faceClient.DetectAsync(jpg);

            var faceIds = faces.Select(face => face.FaceId).ToArray();

            Color?colorToUse = null;

            // Identify with an empty face list is a service error; skip the call.
            if (faceIds.Length > 0)
            {
                // Submit image to API.
                var results = await _faceClient.IdentifyAsync("residents", faceIds);

                foreach (var identifyResult in results)
                {
                    Console.WriteLine("Result of face: {0}", identifyResult.FaceId);
                    if (identifyResult.Candidates.Length == 0)
                    {
                        Console.WriteLine("No one identified");
                        await Dispatcher.BeginInvoke((Action)(() =>
                        {
                            VisitorImage.Visibility = Visibility.Visible;
                        }));

                        try
                        {
                            await notificationClient.SendAppleNativeNotificationAsync("{ \"elevator\": true, \"aircon\": false }");
                        }
                        catch (Exception)
                        {
                            // Best effort: notification failures are deliberately ignored.
                        }
                    }
                    else
                    {
                        // Get top 1 among all candidates returned
                        var candidateId = identifyResult.Candidates[0].PersonId;
                        var person      = await _faceClient.GetPersonAsync("residents", candidateId);

                        Console.WriteLine("Identified as {0}", person.Name);
                        if (person.PersonId == saschaPersonId)
                        {
                            // Known resident: highlight in green and show resident/package UI.
                            colorToUse = new Color {
                                R = 0, G = 255, B = 0, A = 255
                            };
                            await Dispatcher.BeginInvoke((Action)(() =>
                            {
                                ResidentImage.Visibility = Visibility.Visible;
                                PackageImage.Visibility = Visibility.Visible;
                            }));

                            try
                            {
                                await notificationClient.SendAppleNativeNotificationAsync("{\"aps\": { \"content-available\": 1, \"elevator\": true, \"aircon\": true }}");
                            }
                            catch (Exception)
                            {
                                // Best effort: notification failures are deliberately ignored.
                            }
                        }
                    }
                }
            }

            return(new LiveCameraResult {
                Faces = faces, Color = colorToUse
            });
        }