Code example #1
 public FaceEntity(Guid id, Affdex.Face face, DateTimeOffset recordedAt)
 {
     this.face = face;
     Id = face.Id;
     FaceId = id;
     RecordedAt = recordedAt;
 }
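For context, the constructor above implies at least the following shape for FaceEntity. This is only a sketch inferred from the assignments; the property types are assumptions based on the values assigned, and the real class presumably carries additional members (for example, serialization attributes used when the entity is sent later on).

// Sketch only: members implied by the constructor, not the original class definition.
public class FaceEntity
{
    // Raw Affdex face kept by the constructor ("this.face = face").
    private Affdex.Face face;

    public int Id { get; set; }                     // Affdex face id (face.Id)
    public Guid FaceId { get; set; }                // caller-supplied Guid
    public DateTimeOffset RecordedAt { get; set; }  // capture timestamp
}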
Code example #2
        public void onImageResults(Dictionary<int, Affdex.Face> faces, Affdex.Frame image)
        {
            // For now only single face is supported
            if (faces.Count >= 1)
            {
                // The dictionary is keyed by face id, so take the first entry instead of
                // assuming a key of 0.
                Affdex.Face face = faces.First().Value;

                UpdateClassifierPanel(face);
                DisplayFeaturePoints(image, face);
                DisplayMeasurements(face);
            }
        }
Code example #3
        /// <summary>
        /// Draws the image captured from the camera.
        /// </summary>
        /// <param name="image">The image captured.</param>
        private void DrawCapturedImage(Affdex.Frame image)
        {
            // Update the Image control from the UI thread
            var result = this.Dispatcher.BeginInvoke((Action)(() =>
            {
                try
                {
                    // Update the Image control from the UI thread
                    //cameraDisplay.Source = rtb;
                    cameraDisplay.Source = ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight());

                    // Allow N successive OnCapture callbacks before the FacePoint drawing canvas gets cleared.
                    if (++DrawSkipCount > 4)
                    {
                        canvas.Faces = new Dictionary<int, Affdex.Face>();
                        canvas.InvalidateVisual();
                        DrawSkipCount = 0;
                    }

                    if (image != null)
                    {
                        image.Dispose();
                    }
                }
                catch (Exception ex)
                {
                    String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                    ShowExceptionAndShutDown(message);
                }
            }));
        }
Code example #4
 /// <summary>
 /// Draws the facial analysis captured by Affdex.Detector.
 /// </summary>
 /// <param name="image">The image analyzed.</param>
 /// <param name="faces">The faces found in the image analyzed.</param>
 private void DrawData(Affdex.Frame image, Dictionary<int, Affdex.Face> faces)
 {
     try
     {
         // Plot Face Points
         if (faces != null)
         {
             var result = this.Dispatcher.BeginInvoke((Action)(() =>
             {
                 if ((Detector != null) && (Detector.isRunning()))
                 {
                     canvas.Faces = faces;
                     canvas.Width = cameraDisplay.ActualWidth;
                     canvas.Height = cameraDisplay.ActualHeight;
                     canvas.XScale = canvas.Width / image.getWidth();
                     canvas.YScale = canvas.Height / image.getHeight();
                     canvas.InvalidateVisual();                            
                     DrawSkipCount = 0;
                 }
             }));
         }
     }
     catch(Exception ex)
     {
         String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
         ShowExceptionAndShutDown(message);
     }
 }
Code example #5
 /// <summary>
 /// Handles the occurrence of an exception produced by Affdex.Detector
 /// </summary>
 /// <param name="ex">The <see cref="Affdex.AffdexException"/> instance containing the exception details.</param>
 public void onProcessingException(Affdex.AffdexException ex)
 {
     String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
     ShowExceptionAndShutDown(message);
 }
Code example #6
 /// <summary>
 /// Handles the Image capture from source produced by Affdex.Detector
 /// </summary>
 /// <param name="image">The <see cref="Affdex.Frame"/> instance containing the image captured from camera.</param>
 public void onImageCapture(Affdex.Frame image)
 {
     DrawCapturedImage(image);
 }
Code example #7
 /// <summary>
 /// Handles the Image results event produced by Affdex.Detector
 /// </summary>
 /// <param name="faces">The detected faces.</param>
 /// <param name="image">The <see cref="Affdex.Frame"/> instance containing the image analyzed.</param>
 public void onImageResults(Dictionary<int, Affdex.Face> faces, Affdex.Frame image)
 {
     DrawData(image, faces);
 }
Code example #8
        private void DisplayImageToOffscreenCanvas(Affdex.Frame image)
        {
            // Update the Image control from the UI thread
            var result = this.Dispatcher.BeginInvoke((Action)(() =>
            {
                try
                {
                    mCurrentTimeStamp = image.getTimestamp();

                    // Update the Image control from the UI thread
                    //imgAffdexFaceDisplay.Source = rtb;
                    imgAffdexFaceDisplay.Source = ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight());

                    // Allow N successive OnCapture callbacks before the FacePoint drawing canvas gets cleared.
                    if (++mFeaturePointsSkipCount > 4)
                    {
                        canvasFacePoints.Children.Clear();
                        mFeaturePointsSkipCount = 0;
                    }

                    if (image != null)
                    {
                        image.Dispose();
                    }
                }
                catch (Exception ex)
                {
                    String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                    ShowExceptionAndShutDown(message);
                }
            }));
        }
Code example #9
        /// <summary>
        /// Since the panel is getting updated from a separate callback thread, access to controls must be
        /// made through BeginInvoke()
        /// </summary>
        /// <param name="face"></param>
        private void UpdateClassifierPanel(Affdex.Face face = null)
        {
            try
            {
                bool displayClassifiers = imgAffdexFaceDisplay.Visibility != Visibility.Hidden;

                if (mCameraDetector.isRunning())
                {
                    // A Face was found - this comes from ImageResults CallBack
                    if (face != null)
                    {
                        int index = 0;
                        foreach (String metric in mEnabledClassifiers)
                        {
                            PropertyInfo info;
                            float value = -1;
                            if ((info = face.Expressions.GetType().GetProperty(NameMappings(metric))) != null) value = (float)info.GetValue(face.Expressions, null);
                            else if ((info = face.Emotions.GetType().GetProperty(NameMappings(metric))) != null) value = (float)info.GetValue(face.Emotions, null);
                           
                            // Convert classifier value to Integer (percentage) for display purposes
                            mAffdexClassifierValues[index] = Convert.ToInt32(Math.Round(value, MidpointRounding.AwayFromZero));
                            index++;

                        }

                        // Reset the cache count
                        mCachedSkipFaceResultsCount = 0;
                        mFirstFaceRecognized =
                        displayClassifiers = true;
                    }
                    else if (mFirstFaceRecognized == false)
                    {
                        displayClassifiers = false;
                    }
                    else if (++mCachedSkipFaceResultsCount > 10)
                    {
                        for (int r = 0; r < mAffdexClassifierValues.Count(); r++) mAffdexClassifierValues[r] = 0;

                        // If we haven't seen a face in the past 30 frames (roughly two seconds at 15 fps), don't display the classifiers
                        if (mCachedSkipFaceResultsCount >= 30)
                        {
                            displayClassifiers = false;
                        }
                    }

                    var result = this.Dispatcher.BeginInvoke((Action)(() =>
                    {
                        // Only display the classifiers and FacePoints if we've had a recent face detection
                        if (displayClassifiers)
                        {
                            int r = 0;
                            foreach (String classifier in mEnabledClassifiers)
                            {
                                String stackPanelName = String.Format("stackPanel{0}", r);
                                TextBlock ClassifierName = (TextBlock) gridClassifierDisplay.FindName(String.Format("{0}Name", stackPanelName));
                                TextBlock ClassifierValueBackgroud = (TextBlock)gridClassifierDisplay.FindName(String.Format("{0}ValueBackgroud", stackPanelName));
                                TextBlock ClassifierValue = (TextBlock)gridClassifierDisplay.FindName(String.Format("{0}Value", stackPanelName));
                                // Update the Classifier Display
                                UpdateClassifier(ClassifierName, ClassifierValue, ClassifierValueBackgroud, classifier, r);
                                r++;

                            }
                        }

                        // Update the Image control from the UI thread
                        if ((mCameraDetector != null) && (mCameraDetector.isRunning()))
                        {
                            if (imgAffdexFaceDisplay.Visibility == Visibility.Hidden)
                            {
                                imgAffdexFaceDisplay.Visibility =
                                stackPanelClassifiersBackground.Visibility = 
                                stackPanelLogoBackground.Visibility = Visibility.Visible;
                            }
                            stackPanelClassifiers.Visibility = (displayClassifiers)?Visibility.Visible : Visibility.Hidden;
                            interocularDistanceDisplay.Visibility = (displayClassifiers && mShowMeasurements) ? Visibility.Visible : Visibility.Hidden;
                            pitchDisplay.Visibility = (displayClassifiers && mShowMeasurements) ? Visibility.Visible : Visibility.Hidden;
                            yawDisplay.Visibility = (displayClassifiers && mShowMeasurements) ? Visibility.Visible : Visibility.Hidden;
                            rollDisplay.Visibility = (displayClassifiers && mShowMeasurements) ? Visibility.Visible : Visibility.Hidden;
                        }
                    }));
                }
            }
            catch (Exception ex)
            {
                String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                ShowExceptionAndShutDown(message);
            }
        }
Code example #10
        private void DisplayFeaturePoints(Affdex.Frame affdexImage, Affdex.Face affdexFace)
        {
            try
            {
                // Plot Face Points
                if ((mShowFacePoints) && (affdexFace != null))
                {
                    var result = this.Dispatcher.BeginInvoke((Action)(() =>
                    {
                        if ((mCameraDetector != null) && (mCameraDetector.isRunning()))
                        {
                            // Clear the previous points
                            canvasFacePoints.Children.Clear();
                            canvasFacePoints.Width = imgAffdexFaceDisplay.ActualWidth;
                            canvasFacePoints.Height = imgAffdexFaceDisplay.ActualHeight;

                            mImageXScaleFactor = imgAffdexFaceDisplay.ActualWidth / affdexImage.getWidth();
                            mImageYScaleFactor = imgAffdexFaceDisplay.ActualHeight / affdexImage.getHeight();

                            SolidColorBrush pointBrush = new SolidColorBrush(Colors.Cornsilk);
                            var featurePoints = affdexFace.FeaturePoints;
                            foreach (var point in featurePoints)
                            {
                                Ellipse ellipse = new Ellipse()
                                {
                                    Width = 4,
                                    Height = 4,
                                    Fill = pointBrush
                                };

                                canvasFacePoints.Children.Add(ellipse);
                                Canvas.SetLeft(ellipse, point.X * mImageXScaleFactor);
                                Canvas.SetTop(ellipse, point.Y * mImageYScaleFactor);
                            }

                            // Draw Face Bounding Rectangle
                            var xMax = featurePoints.Max(r => r.X);
                            var xMin = featurePoints.Min(r => r.X);
                            var yMax = featurePoints.Max(r => r.Y);
                            var yMin = featurePoints.Min(r => r.Y);

                            // Adjust the x/y min to accommodate all points
                            xMin -= 2;
                            yMin -= 2;

                            // Increase the width/height to accommodate the entire max pixel position
                            // (ellipse width plus a small margin so the max points fall inside the box)
                            double width = (xMax - xMin + 6) * mImageXScaleFactor;
                            double height = (yMax - yMin + 6) * mImageYScaleFactor;

                            SolidColorBrush boundingBrush = new SolidColorBrush(Colors.Bisque);
                            Rectangle boundingBox = new Rectangle()
                            {
                                Width = width,
                                Height = height,
                                Stroke = boundingBrush,
                                StrokeThickness = 1,
                            };

                            canvasFacePoints.Children.Add(boundingBox);
                            Canvas.SetLeft(boundingBox, xMin * mImageXScaleFactor);
                            Canvas.SetTop(boundingBox, yMin * mImageYScaleFactor);

                            mFeaturePointsSkipCount = 0;
                        }
                    }));
                }
            }
            catch(Exception ex)
            {
                String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                ShowExceptionAndShutDown(message);
            }
        }
Code example #11
 private void DisplayMeasurements(Affdex.Face affdexFace)
 {
     // Update measurements
     try
     {
         var result = this.Dispatcher.BeginInvoke((Action)(() =>
             {
                 if (mShowMeasurements && (affdexFace != null))
                 {
                     interocularDistanceDisplay.Text = String.Format("Interocular Distance: {0}", affdexFace.Measurements.InterocularDistance);
                     pitchDisplay.Text = String.Format("Pitch Angle: {0}", affdexFace.Measurements.Orientation.Pitch);
                     yawDisplay.Text = String.Format("Yaw Angle: {0}", affdexFace.Measurements.Orientation.Yaw);
                     rollDisplay.Text = String.Format("Roll Angle: {0}", affdexFace.Measurements.Orientation.Roll);
                 }
             }));
     }
     catch(Exception ex)
     {
         String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
         ShowExceptionAndShutDown(message);
     }
 }
Code example #12
        public void onImageCapture(Affdex.Frame image)
        {
            UpdateClassifierPanel();
            DisplayImageToOffscreenCanvas(image);
        }
Code example #13
        public void onImageResults(Dictionary<int, Affdex.Face> faces, Affdex.Frame image)
        {
            // For now only single face is supported
            if (faces.Count >= 1)
            {
                // The dictionary is keyed by face id, so take the first entry instead of
                // assuming a key of 0.
                Affdex.Face face = faces.First().Value;

                if (face.Id == 0)
                    guid = Guid.NewGuid();

                UpdateClassifierPanel(face);
                DisplayFeaturePoints(image, face);
                DisplayMeasurements(face);

                FaceEntity entity = new FaceEntity(guid, face, DateTimeOffset.Now);
#if DEBUG 
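                // Debug builds also store the entity through the local faceModel context
                // and dump its serialized form to the debugger output.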
                faceModel.FaceEntities.Add(entity);
                //foreach (Affdex.FeaturePoint point in face.FeaturePoints)
                //{
                //    faceModel.FeaturePointEntities.Add(new FeaturePointEntity(guid, face.Id, point));
                //}
               
                faceModel.SaveChanges();

                using (Stream stream = new MemoryStream())
                {
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        serialiser.WriteObject(stream, entity);
                        stream.Position = 0;
                        System.Diagnostics.Debug.WriteLine(reader.ReadToEnd());
                    }
                }
#endif
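                // Send the entity to the Event Hub; the EventData payload is built with the same serializer.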
                hubClient.Send(new EventData(entity, serialiser));
            }
        }
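Code example #13 also relies on a few members that are initialized elsewhere: guid, serialiser, hubClient and faceModel (the local data context, omitted here). The following is only a rough sketch of how that setup might look, assuming the callbacks live in the sample's MainWindow class, the classic Microsoft.ServiceBus.Messaging EventHubClient, and a DataContractJsonSerializer for FaceEntity; the connection string and hub name are placeholders, not values from the original code.

// Sketch of the assumed field initialization (requires the WindowsAzure.ServiceBus NuGet package).
using System;
using System.Runtime.Serialization.Json;
using Microsoft.ServiceBus.Messaging;

public partial class MainWindow
{
    // Grouping Guid; reassigned in onImageResults (code example #13) when the reported face id is 0.
    private Guid guid = Guid.NewGuid();

    // Serializer used for the debug dump and for building the EventData payload.
    private readonly DataContractJsonSerializer serialiser =
        new DataContractJsonSerializer(typeof(FaceEntity));

    // Event Hub client; replace the placeholders with a real connection string and hub name.
    private readonly EventHubClient hubClient =
        EventHubClient.CreateFromConnectionString(
            "Endpoint=sb://<namespace>.servicebus.windows.net/;SharedAccessKeyName=<keyName>;SharedAccessKey=<key>",
            "<event-hub-name>");
}

With fields along these lines in place, the hubClient.Send(new EventData(entity, serialiser)) call in code example #13 publishes each FaceEntity to the hub.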
Code example #14
 public FaceEntity(Guid id, Affdex.Face face) : this(id, face, DateTimeOffset.Now) { }
Code example #15
 public FeaturePointEntity(Guid guid, int id, Affdex.FeaturePoint point)
 {
     this.point = point;
     FaceGuid = guid;
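     // Note: the face id parameter is not currently stored on the entity.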
 }