private void ShowFaceTrackingVisualization(Windows.Foundation.Size framePixelSize, IEnumerable<DetectedFace> detectedFaces)
        {
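            // Remove any face visuals drawn for the previous frame.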
            this.FaceTrackingVisualizationCanvas.Children.Clear();

            double actualWidth  = this.FaceTrackingVisualizationCanvas.ActualWidth;
            double actualHeight = this.FaceTrackingVisualizationCanvas.ActualHeight;

            if (captureManager.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming &&
                detectedFaces != null && actualWidth != 0 && actualHeight != 0)
            {
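                // Ratio of camera-frame pixels to canvas units; face coordinates are divided by these
                // factors to map them onto the visualization canvas.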
                double widthScale  = framePixelSize.Width / actualWidth;
                double heightScale = framePixelSize.Height / actualHeight;

                foreach (DetectedFace face in detectedFaces)
                {
                    System.Diagnostics.Debug.WriteLine("ShowFaceTrackingVisualization(...) FACE DETECTED!!!");

                    RealTimeFaceIdentificationBorder faceBorder = new RealTimeFaceIdentificationBorder();
                    this.FaceTrackingVisualizationCanvas.Children.Add(faceBorder);

                    faceBorder.ShowFaceRectangle((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), (uint)(face.FaceBox.Width / widthScale), (uint)(face.FaceBox.Height / heightScale));

                    if (this.realTimeDataProvider != null)
                    {
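                        // Overlay the most recent emotion scores cached for this face region, if any.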
                        EmotionScores lastEmotion = this.realTimeDataProvider.GetLastEmotionForFace(face.FaceBox);
                        if (lastEmotion != null)
                        {
                            faceBorder.ShowRealTimeEmotionData(lastEmotion);
                        }
                    }

                    if (SettingsHelper.Instance.ShowDebugInfo)
                    {
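                        // Debug overlay: what fraction of the frame height this face occupies.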
                        this.FaceTrackingVisualizationCanvas.Children.Add(new TextBlock
                        {
                            Text   = string.Format("Coverage: {0:0}%", 100 * ((double)face.FaceBox.Height / this.videoProperties.Height)),
                            Margin = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), 0, 0)
                        });
                    }
                }
            }
        }
Example 2
        private void ShowQRCodeTrackingVisualization(Windows.Foundation.Size framePixelSize, DetectedQRCode QRCode)
        {
            this.FaceTrackingVisualizationCanvas.Children.Clear();

            double actualWidth  = this.FaceTrackingVisualizationCanvas.ActualWidth;
            double actualHeight = this.FaceTrackingVisualizationCanvas.ActualHeight;

            if (captureManager.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming &&
                QRCode != null && actualWidth != 0 && actualHeight != 0)
            {
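                // Map the QR code box from frame pixels to canvas coordinates and outline it with the
                // same border control used for faces.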
                double widthScale  = framePixelSize.Width / actualWidth;
                double heightScale = framePixelSize.Height / actualHeight;

                //foreach (DetectedQRCode QRCode in detectedQRCodes)
                //{
                RealTimeFaceIdentificationBorder faceBorder = new RealTimeFaceIdentificationBorder();
                this.FaceTrackingVisualizationCanvas.Children.Add(faceBorder);

                faceBorder.ShowFaceRectangle((uint)(QRCode.QRCodeBox.X / widthScale), (uint)(QRCode.QRCodeBox.Y / heightScale), (uint)(QRCode.QRCodeBox.Width / widthScale), (uint)(QRCode.QRCodeBox.Height / heightScale));

                //}
            }
        }
Example 3
        private void ShowFaceTrackingVisualization(Windows.Foundation.Size framePixelSize, IEnumerable<DetectedFace> detectedFaces)
        {
            this.FaceTrackingVisualizationCanvas.Children.Clear();

            double actualWidth  = this.FaceTrackingVisualizationCanvas.ActualWidth;
            double actualHeight = this.FaceTrackingVisualizationCanvas.ActualHeight;

            if (captureManager.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming &&
                detectedFaces != null && actualWidth != 0 && actualHeight != 0)
            {
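                // Same mapping as above; this variant also overlays age, gender, identification and
                // similar-face results provided by realTimeDataProvider.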
                double widthScale  = framePixelSize.Width / actualWidth;
                double heightScale = framePixelSize.Height / actualHeight;

                foreach (DetectedFace face in detectedFaces)
                {
                    RealTimeFaceIdentificationBorder faceBorder = new RealTimeFaceIdentificationBorder();
                    this.FaceTrackingVisualizationCanvas.Children.Add(faceBorder);

                    faceBorder.ShowFaceRectangle((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), (uint)(face.FaceBox.Width / widthScale), (uint)(face.FaceBox.Height / heightScale));

                    if (this.realTimeDataProvider != null)
                    {
                        Scores lastEmotion = this.realTimeDataProvider.GetLastEmotionForFace(face.FaceBox);
                        if (lastEmotion != null)
                        {
                            faceBorder.ShowRealTimeEmotionData(lastEmotion);
                        }

                        Face                 detectedFace         = this.realTimeDataProvider.GetLastFaceAttributesForFace(face.FaceBox);
                        IdentifiedPerson     identifiedPerson     = this.realTimeDataProvider.GetLastIdentifiedPersonForFace(face.FaceBox);
                        SimilarPersistedFace similarPersistedFace = this.realTimeDataProvider.GetLastSimilarPersistedFaceForFace(face.FaceBox);

                        string uniqueId = null;
                        if (similarPersistedFace != null)
                        {
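                            // Short display id: the first four hex characters of the persisted face id.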
                            uniqueId = similarPersistedFace.PersistedFaceId.ToString("N").Substring(0, 4);
                        }

                        if (detectedFace != null && detectedFace.FaceAttributes != null)
                        {
                            if (identifiedPerson != null && identifiedPerson.Person != null)
                            {
                                // age, gender and id available
                                faceBorder.ShowIdentificationData(detectedFace.FaceAttributes.Age, detectedFace.FaceAttributes.Gender, (uint)Math.Round(identifiedPerson.Confidence * 100), identifiedPerson.Person.Name, uniqueId: uniqueId);
                            }
                            else
                            {
                                // only age and gender available
                                faceBorder.ShowIdentificationData(detectedFace.FaceAttributes.Age, detectedFace.FaceAttributes.Gender, 0, null, uniqueId: uniqueId);
                            }
                        }
                        else if (identifiedPerson != null && identifiedPerson.Person != null)
                        {
                            // only id available
                            faceBorder.ShowIdentificationData(0, null, (uint)Math.Round(identifiedPerson.Confidence * 100), identifiedPerson.Person.Name, uniqueId: uniqueId);
                        }
                        else if (uniqueId != null)
                        {
                            // only unique id available
                            faceBorder.ShowIdentificationData(0, null, 0, null, uniqueId: uniqueId);
                        }
                    }

                    if (SettingsHelper.Instance.ShowDebugInfo)
                    {
                        this.FaceTrackingVisualizationCanvas.Children.Add(new TextBlock
                        {
                            Text   = string.Format("Coverage: {0:0}%", 100 * ((double)face.FaceBox.Height / this.videoProperties.Height)),
                            Margin = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), 0, 0)
                        });
                    }
                }
            }
        }
Example 4
        private void ShowFaceTrackingVisualization(Windows.Foundation.Size framePixelSize, IEnumerable<DetectedFace> detectedFaces)
        {
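            // Variant that also overlays custom object detections (helmets, fire, tools) on top of the face visuals.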
            this.FaceTrackingVisualizationCanvas.Children.Clear();

            double actualWidth  = this.FaceTrackingVisualizationCanvas.ActualWidth;
            double actualHeight = this.FaceTrackingVisualizationCanvas.ActualHeight;

            if (captureManager.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming &&
                detectedFaces != null && actualWidth != 0 && actualHeight != 0)
            {
                double widthScale  = framePixelSize.Width / actualWidth;
                double heightScale = framePixelSize.Height / actualHeight;



                foreach (DetectedFace face in detectedFaces)
                {
                    RealTimeFaceIdentificationBorder faceBorder = new RealTimeFaceIdentificationBorder();
                    this.FaceTrackingVisualizationCanvas.Children.Add(faceBorder);

                    faceBorder.ShowFaceRectangle((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), (uint)(face.FaceBox.Width / widthScale), (uint)(face.FaceBox.Height / heightScale));

                    if (this.realTimeDataProvider != null)
                    {
                        // EmotionScores lastEmotion = this.realTimeDataProvider.GetLastEmotionForFace(face.FaceBox);
                        //if (lastEmotion != null)
                        //{
                        //    faceBorder.ShowRealTimeEmotionData(lastEmotion);
                        //}

                        Face                 detectedFace         = this.realTimeDataProvider.GetLastFaceAttributesForFace(face.FaceBox);
                        IdentifiedPerson     identifiedPerson     = this.realTimeDataProvider.GetLastIdentifiedPersonForFace(face.FaceBox);
                        SimilarPersistedFace similarPersistedFace = this.realTimeDataProvider.GetLastSimilarPersistedFaceForFace(face.FaceBox);

                        string uniqueId = null;
                        if (similarPersistedFace != null)
                        {
                            uniqueId = similarPersistedFace.PersistedFaceId.ToString("N").Substring(0, 4);
                        }

                        if (detectedFace != null && detectedFace.FaceAttributes != null)
                        {
                            if (identifiedPerson != null && identifiedPerson.Person != null)
                            {
                                // age, gender and id available
                                faceBorder.ShowIdentificationData(detectedFace.FaceAttributes.Age, detectedFace.FaceAttributes.Gender, (uint)Math.Round(identifiedPerson.Confidence * 100), identifiedPerson.Person.Name, uniqueId: uniqueId);
                            }
                            else
                            {
                                // only age and gender available
                                faceBorder.ShowIdentificationData(detectedFace.FaceAttributes.Age, detectedFace.FaceAttributes.Gender, 0, null, uniqueId: uniqueId);
                            }
                        }
                        else if (identifiedPerson != null && identifiedPerson.Person != null)
                        {
                            // only id available
                            faceBorder.ShowIdentificationData(0, null, (uint)Math.Round(identifiedPerson.Confidence * 100), identifiedPerson.Person.Name, uniqueId: uniqueId);
                        }
                        else if (uniqueId != null)
                        {
                            // only unique id available
                            faceBorder.ShowIdentificationData(0, null, 0, null, uniqueId: uniqueId);
                        }
                    }

                    if (SettingsHelper.Instance.ShowDebugInfo)
                    {
                        this.FaceTrackingVisualizationCanvas.Children.Add(new TextBlock
                        {
                            Text   = string.Format("Coverage: {0:0}%", 100 * ((double)face.FaceBox.Height / this.videoProperties.Height)),
                            Margin = new Thickness((uint)(face.FaceBox.X / widthScale), (uint)(face.FaceBox.Y / heightScale), 0, 0)
                        });
                    }
                }
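                // Overlay rectangles for custom-model detections; Constants.materials presumably holds the
                // latest object detection results and is skipped on the dashboard view.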
                try
                {
                    if (!Constants.isDashboard)
                    {
                        var rectangledata = Constants.materials;

                        if (Constants.materials != null && Constants.materials.Count > 0)
                        {
                            foreach (var item in rectangledata)
                            {
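                                // item.Confidence appears to carry the detection label here
                                // (e.g. "Helmet_Face", "Fire", "Background"), not a numeric score.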
                                if (item.Confidence == "Helmet_Face")
                                {
                                    Constants.isHelmet = true;
                                }

                                if (item.Confidence == "Fire")
                                {
                                    Constants.isFireDetected = true;
                                }

                                if (item.Confidence != "Background" && item.Confidence != "Helmet_Face")
                                {
                                    RealTimeFaceIdentificationBorder faceBorder = new RealTimeFaceIdentificationBorder();
                                    this.FaceTrackingVisualizationCanvas.Children.Add(faceBorder);

                                    var itemheight = this.FaceTrackingVisualizationCanvas.ActualHeight;
                                    var itemwidth  = this.FaceTrackingVisualizationCanvas.ActualWidth;


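                                    // Scale the detection rectangle from the model's 1080 px coordinate space to the
                                    // frame size; Constants.x / Constants.y appear to hold the frame dimensions as strings.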
                                    int left   = (int)(item.ObjectRect.Left * int.Parse(Constants.x) / 1080);
                                    int width  = (int)(item.ObjectRect.Width * int.Parse(Constants.x) / 1080);
                                    int top    = (int)(item.ObjectRect.Top * int.Parse(Constants.y) / 1080);
                                    int height = (int)(item.ObjectRect.Height * int.Parse(Constants.y) / 1080);
                                    // Apply the fixed 320 px vertical offset (origin undocumented) before scaling down to canvas coordinates.
                                    faceBorder.ShowFaceRectangle((uint)(left / widthScale), (uint)((top - 320) / heightScale), (uint)((width - left) / widthScale), (uint)((height - top) / heightScale));

                                    if (item.Confidence == "Fire")
                                    {
                                        faceBorder.ShowIdentificationData(1, "", 0, item.Confidence, "Hazard");
                                    }
                                    else
                                    {
                                        faceBorder.ShowIdentificationData(1, "", 0, item.Confidence, "tools");
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Keep the preview loop alive if the overlay fails, but surface the error for debugging.
                    System.Diagnostics.Debug.WriteLine(ex.Message);
                }
            }
        }