private void DisplayImageToOffscreenCanvas(Affdex.Frame image)
        {
            // Update the Image control from the UI thread
            var result = this.Dispatcher.BeginInvoke((Action)(() =>
            {
                try
                {
                    mCurrentTimeStamp = image.getTimestamp();

                    // Render the frame's BGR pixel buffer into the Image control
                    imgAffdexFaceDisplay.Source = ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight());

                    // Allow N successive OnCapture callbacks before the FacePoint drawing canvas gets cleared.
                    if (++mFeaturePointsSkipCount > 4)
                    {
                        canvasFacePoints.Children.Clear();
                        mFeaturePointsSkipCount = 0;
                    }

                    // The frame has already been rendered above; release its native buffer
                    image.Dispose();
                }
                catch (Exception ex)
                {
                    String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                    ShowExceptionAndShutDown(message);
                }
            }));
        }
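ConstructImage is not shown in this example. A minimal sketch of such a helper, assuming the Affdex frame hands back a tightly packed 24-bit BGR buffer, could wrap it in a WPF BitmapSource:

using System.Windows.Media;
using System.Windows.Media.Imaging;

        // Hypothetical helper: wrap a tightly packed BGR24 buffer in a BitmapSource.
        private BitmapSource ConstructImage(byte[] bgrPixels, int width, int height)
        {
            // Affdex frames carry no row padding, so the stride is exactly width * 3 bytes.
            return BitmapSource.Create(width, height, 96, 96,
                                       PixelFormats.Bgr24, null, bgrPixels, width * 3);
        }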
Example 2
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame frame)
        {
            process_fps            = 1.0f / (frame.getTimestamp() - process_last_timestamp);
            process_last_timestamp = frame.getTimestamp();
            System.Console.WriteLine(" pfps: {0}", process_fps.ToString());

            byte[] pixels = frame.getBGRByteArray();
            this.img = new Bitmap(frame.getWidth(), frame.getHeight(), PixelFormat.Format24bppRgb);
            var        bounds  = new Rectangle(0, 0, frame.getWidth(), frame.getHeight());
            BitmapData bmpData = img.LockBits(bounds, ImageLockMode.WriteOnly, img.PixelFormat);
            IntPtr     ptr     = bmpData.Scan0;

            int data_x    = 0;
            int ptr_x     = 0;
            int row_bytes = frame.getWidth() * 3;

            // GDI+ requires each bitmap scanline to be 4-byte aligned, so copy row by row.
            // http://stackoverflow.com/questions/20743134/converting-opencv-image-to-gdi-bitmap-doesnt-work-depends-on-image-size

            for (int y = 0; y < frame.getHeight(); y++)
            {
                Marshal.Copy(pixels, data_x, ptr + ptr_x, row_bytes);
                data_x += row_bytes;
                ptr_x  += bmpData.Stride;
            }
            img.UnlockBits(bmpData);

            this.faces = faces;
            //rwLock.ReleaseWriterLock();

            this.Invalidate();
            frame.Dispose();
        }
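The row-by-row copy is needed because GDI+ pads every scanline out to a 4-byte boundary, so bmpData.Stride can exceed the frame's raw row size of width * 3. A sketch that factors the copy into a helper (CopyBgrRows is a hypothetical name) and takes a bulk fast path when no padding exists:

using System.Drawing.Imaging;
using System.Runtime.InteropServices;

// Hypothetical helper: copy a tightly packed BGR24 buffer into locked bitmap
// data, honoring the 4-byte scanline padding GDI+ applies.
static void CopyBgrRows(byte[] pixels, BitmapData bmpData, int width, int height)
{
    int rowBytes = width * 3;
    if (bmpData.Stride == rowBytes)
    {
        // No padding: one bulk copy covers the whole frame.
        Marshal.Copy(pixels, 0, bmpData.Scan0, rowBytes * height);
        return;
    }
    for (int y = 0; y < height; y++)
    {
        // Padded rows: copy each scanline to its stride-aligned offset.
        Marshal.Copy(pixels, y * rowBytes, bmpData.Scan0 + y * bmpData.Stride, rowBytes);
    }
}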
Example 3
        /// <summary>
        /// Draws the image captured from the camera.
        /// </summary>
        /// <param name="sender">The event source.</param>
        /// <param name="e">The event arguments carrying the captured frame.</param>
        private void DrawCapturedImage(object sender, FaceWatcherEventArgs e)
        {
            // Update the Image control from the UI thread
            var result = this.Dispatcher.BeginInvoke((Action)(() =>
            {
                try
                {
                    Affdex.Frame image = e.Frame;
                    // Render the frame's BGR pixel buffer into the Image control
                    cameraDisplay.Source = ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight());

                    // Allow N successive OnCapture callbacks before the FacePoint drawing canvas gets cleared.
                    if (++DrawSkipCount > 4)
                    {
                        canvas.Faces = new Dictionary <int, Affdex.Face>();
                        canvas.InvalidateVisual();
                        DrawSkipCount = 0;
                    }

                    // The frame has already been rendered above; release its native buffer
                    image.Dispose();
                }
                catch (Exception ex)
                {
                    DataManager.ShowExceptionAndShutDown(ex);
                }
            }));
        }
Example 4
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame image)
        {
            foreach (KeyValuePair <int, Affdex.Face> pair in faces)
            {
                Affdex.Face face  = pair.Value;
                float       value = -1;

                float  max_value    = float.MinValue;
                string emotion_name = "";

                foreach (PropertyInfo info in typeof(Affdex.Emotions).GetProperties())
                {
                    value = (float)info.GetValue(face.Emotions, null);
                    if (counter < 9)
                    {
                        emotions.Add(new EmotionResult {
                            Name = info.Name, Value = value
                        });
                        counter++;
                    }

                    if (max_value < value)
                    {
                        max_value    = value;
                        emotion_name = info.Name;
                        Trace.WriteLine(info.Name);
                    }
                }
                playSound(emotion_name);
            }
            image.Dispose();
        }
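The manual max scan above can be written more compactly. A sketch of the same selection over the Affdex.Emotions properties using LINQ (face and playSound are the names from the example):

using System.Linq;
using System.Reflection;

        // Sketch: pick the strongest emotion by ordering the reflected values.
        var strongest = typeof(Affdex.Emotions).GetProperties()
            .Select(p => new { p.Name, Value = (float)p.GetValue(face.Emotions, null) })
            .OrderByDescending(e => e.Value)
            .First();
        playSound(strongest.Name);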
Example 5
        /// <summary>
        /// Draws the facial analysis captured by Affdex.Detector.
        /// </summary>
        /// <param name="sender">The event source.</param>
        /// <param name="e">The event arguments carrying the analyzed frame and the faces found in it.</param>
        private void DrawData(object sender, FaceWatcherEventArgs e)
        {
            try
            {
                Affdex.Frame image = e.Frame;
                Dictionary <int, Affdex.Face> faces = e.Faces;

                // Plot Face Points
                if (faces != null)
                {
                    var result = this.Dispatcher.BeginInvoke((Action)(() =>
                    {
                        if ((DataManager.FaceWatcher.Detector != null) && (DataManager.FaceWatcher.Detector.isRunning()))
                        {
                            canvas.Faces = faces;
                            canvas.Width = cameraDisplay.ActualWidth;
                            canvas.Height = cameraDisplay.ActualHeight;
                            canvas.XScale = canvas.Width / image.getWidth();
                            canvas.YScale = canvas.Height / image.getHeight();
                            canvas.InvalidateVisual();
                            DrawSkipCount = 0;
                        }
                    }));
                }
            }
            catch (Exception ex)
            {
                DataManager.ShowExceptionAndShutDown(ex);
            }
        }
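Example 6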
 /// <summary>
 /// Draws the facial analysis captured by Affdex.Detector.
 /// </summary>
 /// <param name="image">The image analyzed.</param>
 /// <param name="faces">The faces found in the image analyzed.</param>
 private void DrawData(Affdex.Frame image, Dictionary <int, Affdex.Face> faces)
 {
     try
     {
         // Plot Face Points
         if (faces != null)
         {
             var result = this.Dispatcher.BeginInvoke((Action)(() =>
             {
                 if ((Detector != null) && (Detector.isRunning()))
                 {
                     canvas.Faces = faces;
                     canvas.Width = cameraDisplay.ActualWidth;
                     canvas.Height = cameraDisplay.ActualHeight;
                     canvas.XScale = canvas.Width / image.getWidth();
                     canvas.YScale = canvas.Height / image.getHeight();
                     canvas.InvalidateVisual();
                     DrawSkipCount = 0;
                 }
             }));
         }
     }
     catch (Exception ex)
     {
         String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
         ShowExceptionAndShutDown(message);
     }
 }
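Example 7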
        /// <summary>
        /// Draws the image captured from the camera.
        /// </summary>
        /// <param name="image">The image captured.</param>
        private void DrawCapturedImage(Affdex.Frame image)
        {
            // Update the Image control from the UI thread
            var result = this.Dispatcher.BeginInvoke((Action)(() =>
            {
                try
                {
                    // Render the frame's BGR pixel buffer into the Image control
                    cameraDisplay.Source = ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight());

                    // Allow N successive OnCapture callbacks before the FacePoint drawing canvas gets cleared.
                    if (++DrawSkipCount > 4)
                    {
                        canvas.Faces = new Dictionary <int, Affdex.Face>();
                        canvas.InvalidateVisual();
                        DrawSkipCount = 0;
                    }

                    // The frame has already been rendered above; release its native buffer
                    image.Dispose();
                }
                catch (Exception ex)
                {
                    String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                    ShowExceptionAndShutDown(message);
                }
            }));
        }
Example 8
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame frame)
        {
            process_fps            = 1.0f / (frame.getTimestamp() - process_last_timestamp);
            process_last_timestamp = frame.getTimestamp();

            //Console.WriteLine(process_fps);

            try
            {
                byte[] pixels = frame.getBGRByteArray();
                this.img = new Bitmap(frame.getWidth(), frame.getHeight(), PixelFormat.Format24bppRgb);

                if (this.saveWebCam && this.start)
                {
                    this.saveWebCam = false;
                    //this.vWritter = new VideoWriter( this.videoPath, CvInvoke.CV_FOURCC('X', 'V', 'I', 'D'), (int)framePerSeconds, this.img.Width, this.img.Height, true);
                    this.vWritter = new VideoWriter(this.videoPath, CvInvoke.CV_FOURCC('M', 'J', 'P', 'G'), (int)framePerSeconds, this.img.Width, this.img.Height, true);
                    //this.vWritter = new VideoWriter(this.videoPath, CvInvoke.CV_FOURCC('M', 'P', '4', '2'), (int)framePerSeconds, this.img.Width, this.img.Height, true);
                }

                var        bounds  = new Rectangle(0, 0, frame.getWidth(), frame.getHeight());
                BitmapData bmpData = img.LockBits(bounds, ImageLockMode.WriteOnly, img.PixelFormat);
                IntPtr     ptr     = bmpData.Scan0;

                int data_x    = 0;
                int ptr_x     = 0;
                int row_bytes = frame.getWidth() * 3;

                // GDI+ requires each bitmap scanline to be 4-byte aligned, so copy row by row.
                // http://stackoverflow.com/questions/20743134/converting-opencv-image-to-gdi-bitmap-doesnt-work-depends-on-image-size

                for (int y = 0; y < frame.getHeight(); y++)
                {
                    Marshal.Copy(pixels, data_x, ptr + ptr_x, row_bytes);
                    data_x += row_bytes;
                    ptr_x  += bmpData.Stride;
                }
                img.UnlockBits(bmpData);

                if (this.vWritter != null && this.start)
                {
                    //Bitmap bmp = img.Clone(new Rectangle(0, 0, img.Width, img.Height), img.PixelFormat);
                    //this.vWritter.WriteFrame(new Image<Bgr, byte>( bmp ));

                    this.vWritter.WriteFrame(new Image <Bgr, byte>(this.img));
                }
            }
            catch (Exception e)
            {
                System.Console.WriteLine(e.ToString());
            }

            this.faces = faces;
            //rwLock.ReleaseWriterLock();

            this.Invalidate();
            frame.Dispose();
        }
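Example 9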
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame image)
        {
            // For now only a single face is supported; take the first tracked face
            // (face IDs increment when tracking is lost and re-acquired, so key 0 is not guaranteed)
            if (faces.Count >= 1)
            {
                Affdex.Face face = faces.Values.First();

                UpdateClassifierPanel(face);
                DisplayFeaturePoints(image, face);
                DisplayMeasurements(face);
            }
        }
Example 10
 public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame frame)
 {
     foreach (KeyValuePair <int, Affdex.Face> pair in faces)
     {
         Affdex.Face face = pair.Value;
         if (face != null)
         {
             foreach (PropertyInfo prop in typeof(Affdex.Emotions).GetProperties())
             {
                 float  value  = (float)prop.GetValue(face.Emotions, null);
                 String output = String.Format("{0}: {1:0.00}", prop.Name, value);
                 System.Console.WriteLine(output);
             }
         }
     }
     frame.Dispose();
 }
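Example 11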
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame image)
        {
            // For now only a single face is supported; take the first tracked face
            // (face IDs increment when tracking is lost and re-acquired, so key 0 is not guaranteed)
            if (faces.Count >= 1)
            {
                Affdex.Face face = faces.Values.First();

                if (face.Id == 0)
                {
                    guid = Guid.NewGuid();
                }

                UpdateClassifierPanel(face);
                DisplayFeaturePoints(image, face);
                DisplayMeasurements(face);

                FaceEntity entity = new FaceEntity(guid, face, DateTimeOffset.Now);
#if DEBUG
                faceModel.FaceEntities.Add(entity);
                //foreach (Affdex.FeaturePoint point in face.FeaturePoints)
                //{
                //    faceModel.FeaturePointEntities.Add(new FeaturePointEntity(guid, face.Id, point));
                //}

                faceModel.SaveChanges();


                using (Stream stream = new MemoryStream())
                {
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        serialiser.WriteObject(stream, entity);
                        stream.Position = 0;
                        System.Diagnostics.Debug.WriteLine(reader.ReadToEnd());
                    }
                }
#endif
                hubClient.Send(new EventData(entity, serialiser));
            }
        }
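Example 12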
        /// <summary>
        /// Meant to run on its own thread so frames can be checked for continuously
        /// without stalling the program: first clear the off flag, then, while not off,
        /// dequeue frames and pass them to the file writer for video; once off is set,
        /// exit the main loop and drain any remaining frames before closing the writer.
        /// </summary>
        void FilerStreamerThread()
        {
            moff = false;

            while (!moff)
            {
                if (mCameraAffectivaRecorder.lastframe.Count > 0)
                {
                    Affdex.Frame frame = mCameraAffectivaRecorder.lastframe.Dequeue();
                    mFilerWriterForVideo.AddBitmap(frame.getWidth(), frame.getHeight(), frame.getBGRByteArray(), frame.getColorFormat(), frame.getTimestamp());
                    System.Threading.Thread.Sleep(1);
                }
            }

            while (mCameraAffectivaRecorder.lastframe.Count > 0)
            {
                Affdex.Frame frame = mCameraAffectivaRecorder.lastframe.Dequeue();
                mFilerWriterForVideo.AddBitmap(frame.getWidth(), frame.getHeight(), frame.getBGRByteArray(), frame.getColorFormat(), frame.getTimestamp());
                System.Threading.Thread.Sleep(1);
            }
            mFilerWriterForVideo.CloseFileSaver();
        }
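Polling a plain Queue from another thread, as above, is not safe against a concurrent Enqueue and burns cycles in Thread.Sleep. A sketch of the same producer/consumer shape built on BlockingCollection (mFilerWriterForVideo is the name from the example; mFrames is an assumed replacement for lastframe):

using System.Collections.Concurrent;

        // Assumed replacement for the Queue<Affdex.Frame>: thread-safe and blocking.
        private readonly BlockingCollection<Affdex.Frame> mFrames = new BlockingCollection<Affdex.Frame>();

        void FilerStreamerThread()
        {
            // GetConsumingEnumerable blocks until a frame arrives and ends cleanly
            // once CompleteAdding() is called and the queue drains, so no off flag,
            // no polling, and no separate drain loop are needed.
            foreach (Affdex.Frame frame in mFrames.GetConsumingEnumerable())
            {
                mFilerWriterForVideo.AddBitmap(frame.getWidth(), frame.getHeight(),
                                               frame.getBGRByteArray(), frame.getColorFormat(),
                                               frame.getTimestamp());
                frame.Dispose();
            }
            mFilerWriterForVideo.CloseFileSaver();
        }

The producer side would call mFrames.Add(frame) for each captured frame and mFrames.CompleteAdding() in place of setting the off flag.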
Example 13
 public void onImageCapture(Affdex.Frame frame)
 {
     frame.Dispose();
 }
Example 14
 public void onImageCapture(Affdex.Frame image)
 {
     image.Dispose();
 }
Example 15
        static void processVideo(String pVideo, CmdOptions options)
        {
            try
            {
                Affdex.Detector detector = null;
                List <string>   imgExts  = new List <string> {
                    ".bmp", ".jpg", ".gif", ".png", ".jpe"
                };
                List <string> vidExts = new List <string> {
                    ".avi", ".mov", ".flv", ".webm", ".wmv", ".mp4"
                };

                // Match on the file's actual extension; a substring test would misfire
                // on paths that merely contain ".mov" or similar.
                bool isImage = imgExts.Contains(Path.GetExtension(pVideo).ToLowerInvariant());

                if (isImage)
                {
                    System.Console.WriteLine("Trying to process a bitmap image..." + options.Input.ToString());
                    detector = new Affdex.PhotoDetector((uint)options.numFaces, (Affdex.FaceDetectorMode)options.faceMode);
                }
                else
                {
                    System.Console.WriteLine("Trying to process a video file..." + options.Input.ToString());
                    detector = new Affdex.VideoDetector(60F, (uint)options.numFaces, (Affdex.FaceDetectorMode)options.faceMode);
                }

                if (detector != null)
                {
                    System.Globalization.CultureInfo customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone();
                    customCulture.NumberFormat.NumberDecimalSeparator = ".";

                    System.Threading.Thread.CurrentThread.CurrentCulture = customCulture;

                    string pV = Directory.GetParent(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData)).FullName;
                    if (Environment.OSVersion.Version.Major >= 6)
                    {
                        pV = Directory.GetParent(pV).ToString();
                    }

                    StreamWriter outputFile = null;

                    string Fname      = Path.Combine(@pV, "blinkValues_" + DateTime.Now.ToString("yyMMdd_hhmmss") + ".txt");
                    string fileHeader = "";

                    fileHeader += "Blink\tEye-Aspect-Rate\t";
                    fileHeader += "frame";

                    System.Console.WriteLine(fileHeader);
                    outputFile = new StreamWriter(Fname);

                    outputFile.WriteLine(fileHeader);

                    ProcessVideo videoForm = new ProcessVideo(detector, 15);

                    videoForm.setOutputVideoFileLog(outputFile);

                    detector.setClassifierPath(options.DataFolder);
                    detector.setDetectAllEmotions(true);
                    detector.setDetectAllExpressions(true);
                    detector.setDetectAllEmojis(true);
                    detector.setDetectAllAppearances(true);
                    detector.start();
                    System.Console.WriteLine("Face detector mode = " + detector.getFaceDetectorMode().ToString());

                    if (isImage)
                    {
                        Affdex.Frame img = LoadFrameFromFile(options.Input);

                        ((Affdex.PhotoDetector)detector).process(img);
                    }
                    else
                    {
                        ((Affdex.VideoDetector)detector).process(options.Input);
                    }

                    videoForm.ShowDialog();
                    videoForm.Dispose();
                    //videoForm = null;

                    outputFile.Close();

                    detector.stop();
                    detector.Dispose();
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("ERROR: " + ex.Message);
            }
        }
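LoadFrameFromFile is not shown in this example. A minimal sketch, assuming the C# wrapper's Affdex.Frame(width, height, pixels, COLOR_FORMAT) constructor and its BGR format value (check the SDK version you have):

using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;

        // Sketch: load an image file and repack it as an Affdex.Frame.
        static Affdex.Frame LoadFrameFromFile(string path)
        {
            using (var bitmap = new Bitmap(path))
            {
                var bounds = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
                BitmapData data = bitmap.LockBits(bounds, ImageLockMode.ReadOnly,
                                                  PixelFormat.Format24bppRgb);
                try
                {
                    // Copy row by row so GDI+ stride padding is not carried into the buffer.
                    int rowBytes = bitmap.Width * 3;
                    byte[] pixels = new byte[rowBytes * bitmap.Height];
                    for (int y = 0; y < bitmap.Height; y++)
                    {
                        Marshal.Copy(data.Scan0 + y * data.Stride, pixels, y * rowBytes, rowBytes);
                    }
                    return new Affdex.Frame(bitmap.Width, bitmap.Height, pixels,
                                            Affdex.Frame.COLOR_FORMAT.BGR);
                }
                finally
                {
                    bitmap.UnlockBits(data);
                }
            }
        }

Example 16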
        private void DisplayFeaturePoints(Affdex.Frame affdexImage, Affdex.Face affdexFace)
        {
            try
            {
                // Plot Face Points
                if ((mShowFacePoints) && (affdexFace != null))
                {
                    var result = this.Dispatcher.BeginInvoke((Action)(() =>
                    {
                        if ((mCameraDetector != null) && (mCameraDetector.isRunning()))
                        {
                            // Clear the previous points
                            canvasFacePoints.Children.Clear();
                            canvasFacePoints.Width = imgAffdexFaceDisplay.ActualWidth;
                            canvasFacePoints.Height = imgAffdexFaceDisplay.ActualHeight;

                            mImageXScaleFactor = imgAffdexFaceDisplay.ActualWidth / affdexImage.getWidth();
                            mImageYScaleFactor = imgAffdexFaceDisplay.ActualHeight / affdexImage.getHeight();

                            SolidColorBrush pointBrush = new SolidColorBrush(Colors.Cornsilk);
                            var featurePoints = affdexFace.FeaturePoints;
                            foreach (var point in featurePoints)
                            {
                                Ellipse ellipse = new Ellipse()
                                {
                                    Width = 4,
                                    Height = 4,
                                    Fill = pointBrush
                                };

                                canvasFacePoints.Children.Add(ellipse);
                                Canvas.SetLeft(ellipse, point.X * mImageXScaleFactor);
                                Canvas.SetTop(ellipse, point.Y * mImageYScaleFactor);
                            }

                            // Draw Face Bounding Rectangle
                            var xMax = featurePoints.Max(r => r.X);
                            var xMin = featurePoints.Min(r => r.X);
                            var yMax = featurePoints.Max(r => r.Y);
                            var yMin = featurePoints.Min(r => r.Y);

                            // Adjust the x/y min to accommodate all points
                            xMin -= 2;
                            yMin -= 2;

                            // Increase the width/height to accommodate the entire max pixel position
                            // (ellipse width + margin, so the max points stay inside the box)
                            double width = (xMax - xMin + 6) * mImageXScaleFactor;
                            double height = (yMax - yMin + 6) * mImageYScaleFactor;

                            SolidColorBrush boundingBrush = new SolidColorBrush(Colors.Bisque);
                            Rectangle boundingBox = new Rectangle()
                            {
                                Width = width,
                                Height = height,
                                Stroke = boundingBrush,
                                StrokeThickness = 1,
                            };

                            canvasFacePoints.Children.Add(boundingBox);
                            Canvas.SetLeft(boundingBox, xMin * mImageXScaleFactor);
                            Canvas.SetTop(boundingBox, yMin * mImageYScaleFactor);

                            mFeaturePointsSkipCount = 0;
                        }
                    }));
                }
            }
            catch (Exception ex)
            {
                String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                ShowExceptionAndShutDown(message);
            }
        }
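Example 17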
 public void onImageCapture(Affdex.Frame image)
 {
     UpdateClassifierPanel();
     DisplayImageToOffscreenCanvas(image);
 }
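Example 18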
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame frame)
        {
            process_fps            = 1.0f / (frame.getTimestamp() - process_last_timestamp);
            process_last_timestamp = frame.getTimestamp();
            // System.Console.WriteLine(" pfps: {0}", process_fps.ToString());

            byte[] pixels = frame.getBGRByteArray();
            this.img = new Bitmap(frame.getWidth(), frame.getHeight(), PixelFormat.Format24bppRgb);
            var        bounds  = new Rectangle(0, 0, frame.getWidth(), frame.getHeight());
            BitmapData bmpData = img.LockBits(bounds, ImageLockMode.WriteOnly, img.PixelFormat);
            IntPtr     ptr     = bmpData.Scan0;


            int data_x    = 0;
            int ptr_x     = 0;
            int row_bytes = frame.getWidth() * 3;

            // GDI+ requires each bitmap scanline to be 4-byte aligned, so copy row by row.
            // http://stackoverflow.com/questions/20743134/converting-opencv-image-to-gdi-bitmap-doesnt-work-depends-on-image-size

            for (int y = 0; y < frame.getHeight(); y++)
            {
                Marshal.Copy(pixels, data_x, ptr + ptr_x, row_bytes);
                data_x += row_bytes;
                ptr_x  += bmpData.Stride;
            }
            img.UnlockBits(bmpData);

            this.faces = faces;
            string[] emotionArr = new string[7];
            float[]  indexArr   = new float[7];

            if (myFlag == 0)
            {
                //Hifza
                //initialize socket for data transfer to java server
                //myNetworks.myNetwork.StartClient("192.168.123.2", 55555);
                myNetworks.myNetwork.StartClient("localhost", 54322);
                myNetworks4.myNetwork4P.Main(9999);
                myFlag = 1;
            }


            foreach (KeyValuePair <int, Affdex.Face> pair in faces)
            {
                Affdex.Face face = pair.Value;
                if (face != null)
                {
                    int a = 0;
                    foreach (PropertyInfo prop in typeof(Affdex.Emotions).GetProperties())
                    {
                        float value = (float)prop.GetValue(face.Emotions, null);

                        string output = string.Format("{0}: {1}", prop.Name, value);
                        if (prop.Name != "Engagement" && prop.Name != "Valence")
                        {
                            emotionArr[a] = prop.Name;
                            indexArr[a]   = value;
                            a             = a + 1;
                        }
                        output = frame.getTimestamp().ToString() + " " + output;

                        //System.IO.File.AppendAllText(@"C:\Users\artmed\Documents\sangwonlee\Affdex_Outputs\Affdex_Results.txt", DateTime.Now.ToString("hh.mm.ss.ffffff") + " " + output + " " + Environment.NewLine);
                        //System.Console.WriteLine(output);
                    }
                    float  maxValue   = indexArr.Max();
                    int    maxIndex   = indexArr.ToList().IndexOf(maxValue);
                    string maxEmotion = emotionArr[maxIndex];


                    //determine arousal/valence values for the emotion name
                    string[] emoArr = new string[18];
                    double[] valArr = new double[18];
                    double[] arArr  = new double[18];
                    //String-typed data may not transfer reliably, so for now send numbers (the array index!) instead

                    //emoArr[0] = "Neutral/Default"; valArr[0] = 5; arArr[0] = 5;
                    // emoArr[1] = "Excited"; valArr[1] = 11; arArr[1] = 20;
                    emoArr[0] = "Joy"; valArr[0] = 20; arArr[0] = 15;
                    //emoArr[3] = "Curious"; valArr[3] = 8; arArr[3] = 8;
                    //emoArr[4] = "Sleepy"; valArr[4] = 5; arArr[4] = -20;
                    //emoArr[5] = "Tired"; valArr[5] = 1; arArr[5] = -19;
                    //emoArr[6] = "Gloomy/Crying"; valArr[6] = -17; arArr[6] = -11;
                    emoArr[1] = "Sadness"; valArr[1] = -17; arArr[1] = -6;
                    //emoArr[8] = "Dizzy/Distressed"; valArr[8] = -17; arArr[8] = 6;
                    //emoArr[9] = "Frustrated"; valArr[9] = -18; arArr[9] = 14;
                    emoArr[2] = "Anger"; valArr[2] = -13; arArr[2] = 16;
                    emoArr[3] = "Fear"; valArr[3] = -15; arArr[3] = 20;
                    //emoArr[12] = "Celebrating"; valArr[12] = 20; arArr[12] = 20;
                    //emoArr[13] = "Wanting"; valArr[13] = 10; arArr[13] = 17;
                    //emoArr[14] = "Bored"; valArr[14] = -15; arArr[14] = -18;
                    emoArr[4] = "Disgust"; valArr[4] = -13; arArr[4] = 13;
                    //emoArr[16] = "Unhappy"; valArr[16] = -19; arArr[16] = -3;
                    //emoArr[17] = "Nervous/Tense"; valArr[17] = -10; arArr[17] = 15;
                    emoArr[5] = "Surprise"; valArr[5] = 0; arArr[5] = 13;
                    emoArr[6] = "Contempt"; valArr[6] = -13; arArr[6] = 6;

                    //double emotion2 = 0;
                    double valence = 0; double arousal = 0;
                    for (int i = 0; i < a; i++)
                    {
                        if (maxEmotion == emoArr[i])
                        {
                            //emotion2 = i;
                            valence = valArr[i];
                            arousal = arArr[i];
                            break;
                        }
                    }

                    string sendStr1 = "A " + maxEmotion + " " + arousal.ToString() + " " + valence.ToString();// + "\n";
                    //string sendStr1 = maxEmotion;
                    string sendStr2 = maxEmotion + " " + maxValue.ToString();
                    //System.Console.WriteLine("\n"+sendStr1+ "\n");
                    //flag prevents repeated socket creation

                    faceEmotion curEmo = new faceEmotion();
                    // Copy each detected emotion's intensity into the matching field by name
                    for (int i = 0; i < a; i++)
                    {
                        switch (emotionArr[i])
                        {
                            case "Joy":      curEmo.Joy      = indexArr[i]; break;
                            case "Sadness":  curEmo.Sadness  = indexArr[i]; break;
                            case "Anger":    curEmo.Anger    = indexArr[i]; break;
                            case "Fear":     curEmo.Fear     = indexArr[i]; break;
                            case "Disgust":  curEmo.Disgust  = indexArr[i]; break;
                            case "Surprise": curEmo.Surprise = indexArr[i]; break;
                            case "Contempt": curEmo.Contempt = indexArr[i]; break;
                        }
                    }
                    curEmo.maxEmo = maxValue;

                    facialExpressions curExprs;
                    curExprs.attention          = face.Expressions.Attention;
                    curExprs.browFurrow         = face.Expressions.BrowFurrow;
                    curExprs.browRaise          = face.Expressions.BrowRaise;
                    curExprs.cheekRaise         = face.Expressions.CheekRaise;
                    curExprs.chinRaise          = face.Expressions.ChinRaise;
                    curExprs.dimpler            = face.Expressions.Dimpler;
                    curExprs.eyeClosure         = face.Expressions.EyeClosure;
                    curExprs.eyeWiden           = face.Expressions.EyeWiden;
                    curExprs.innerBrowRaise     = face.Expressions.InnerBrowRaise;
                    curExprs.jawDrop            = face.Expressions.JawDrop;
                    curExprs.lidTighten         = face.Expressions.LidTighten;
                    curExprs.lipCornerDepressor = face.Expressions.LipCornerDepressor;
                    curExprs.lipPress           = face.Expressions.LipPress;
                    curExprs.lipPucker          = face.Expressions.LipPucker;
                    curExprs.lipStretch         = face.Expressions.LipStretch;
                    curExprs.lipSuck            = face.Expressions.LipSuck;
                    curExprs.mouthOpen          = face.Expressions.MouthOpen;
                    curExprs.noseWrinkle        = face.Expressions.NoseWrinkle;
                    curExprs.smile         = face.Expressions.Smile;
                    curExprs.smirk         = face.Expressions.Smirk;
                    curExprs.upperLipRaise = face.Expressions.UpperLipRaise;

                    string tempOut = string.Format("{0}    {1}  {2} {3}",
                                                   curExprs.cheekRaise, curExprs.smile, curExprs.lipSuck, curExprs.chinRaise);
                    //System.Console.WriteLine(tempOut + "\n");

                    //System.Console.WriteLine(myNetworks4.myNetwork4P.pythonLabel() + "\n");

                    byte[] expRawdDta = Serialize(curExprs);

                    myOrientation tempOrientation;
                    tempOrientation.roll  = face.Measurements.Orientation.Roll;
                    tempOrientation.pitch = face.Measurements.Orientation.Pitch;
                    tempOrientation.yaw   = face.Measurements.Orientation.Yaw;

                    byte[] oriRawdata = Serialize(tempOrientation);
                    //Serialize(tempOrientation, data2send);

                    //Send the facial emotion analysis results
                    byte[] emoRawdata = Serialize(curEmo);

                    //System.Console.WriteLine("sfdhgaetrhartfhbagfbrstfdhbatfgearfgaertg\n");
                    pythonLabel tempt;
                    tempt.num = myNetworks4.myNetwork4P.pythonLabel();
                    //System.Console.WriteLine("sfdhgaetrhartfhbagfbrstfdhbatfgearfgaertg\n");
                    //System.Console.WriteLine(tempt.num + "\n");

                    byte[] labelData = Serialize(tempt);

                    //byte[] data2send = new byte[expRawdDta.Length + oriRawdata.Length + 1];
                    byte[] data2send = new byte[expRawdDta.Length + oriRawdata.Length + emoRawdata.Length + labelData.Length + 1];

                    data2send[0] = (byte)(data2send.Length);
                    Array.Copy(oriRawdata, 0, data2send, 1, oriRawdata.Length);
                    Array.Copy(expRawdDta, 0, data2send, (1 + oriRawdata.Length), expRawdDta.Length);
                    Array.Copy(emoRawdata, 0, data2send, (1 + oriRawdata.Length + expRawdDta.Length), emoRawdata.Length);
                    Array.Copy(labelData, 0, data2send, (1 + oriRawdata.Length + expRawdDta.Length + emoRawdata.Length), labelData.Length);

                    //Hifza
                    //send data to java server through socket
                    if (myFlag == 1)
                    {
                        if (!myNetworks.myNetwork.SendData(data2send))
                        {
                            //myNetworks.myNetwork.CloseClient();
                            //this.Invalidate();
                            //frame.Dispose();
                            //Environment.Exit(0);
                            try       //If the try fails, the catch below only prints a message; a proper shutdown may be needed here
                            {
                                this.Close();
                                //this.Invalidate();
                                //detector.stop();
                                //frame.Dispose();
                            }
                            catch (Exception ex)
                            {
                                System.Console.WriteLine("Closing.");
                                Console.WriteLine("\nMessage ---\n{0}", ex.Message);
                            }
                        }
                    }


                    //Added by Hifza: expressions and emojis can be logged the same way as the
                    //emotions above, by reflecting over the Affdex.Expressions and Affdex.Emojis
                    //properties, formatting each as "{Name}: {value}", and appending the lines to
                    //the Affdex_Results.txt log (or sending them through the socket).
                }
            }

            this.Invalidate();
            frame.Dispose();
        }
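The Serialize helper used for facialExpressions, myOrientation, and the other payload structs is not shown. A minimal sketch, assuming the payloads are blittable structs, flattens them to their raw in-memory bytes:

using System;
using System.Runtime.InteropServices;

        // Sketch: flatten a blittable struct into its raw in-memory byte layout.
        static byte[] Serialize<T>(T value) where T : struct
        {
            int size = Marshal.SizeOf(typeof(T));
            byte[] buffer = new byte[size];
            IntPtr ptr = Marshal.AllocHGlobal(size);
            try
            {
                Marshal.StructureToPtr(value, ptr, false);
                Marshal.Copy(ptr, buffer, 0, size);
            }
            finally
            {
                Marshal.FreeHGlobal(ptr);
            }
            return buffer;
        }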
Example 19
 /// <summary>
 /// Handles the Image results event produced by Affdex.Detector
 /// </summary>
 /// <param name="faces">The detected faces.</param>
 /// <param name="image">The <see cref="Affdex.Frame"/> instance containing the image analyzed.</param>
 public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame image)
 {
     DrawData(image, faces);
 }
Example 20
 /// <summary>
 /// Handles the Image capture from source produced by Affdex.Detector
 /// </summary>
 /// <param name="image">The <see cref="Affdex.Frame"/> instance containing the image captured from camera.</param>
 public void onImageCapture(Affdex.Frame image)
 {
     DrawCapturedImage(image);
 }