Example #1
        private void DrawCapturedImage(Affdex.Frame image)
        {
            var result = this.Dispatcher.BeginInvoke((Action)(() =>
            {
                try
                {
                    cameraDisplay.Source = ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight());

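                    // Allow N successive onImageCapture callbacks before the FacePoint drawing canvas gets cleared.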
                    if (++DrawSkipCount > 4)
                    {
                        canvas.Faces = new Dictionary <int, Affdex.Face>();
                        canvas.InvalidateVisual();
                        DrawSkipCount = 0;
                    }

                    if (image != null)
                    {
                        image.Dispose();
                    }
                }
                catch (Exception ex)
                {
                    String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                    ShowExceptionAndShutDown(message);
                }
            }));
        }
Example #2
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame frame)
        {
            process_fps            = 1.0f / (frame.getTimestamp() - process_last_timestamp);
            process_last_timestamp = frame.getTimestamp();
            System.Console.WriteLine(" pfps: {0}", process_fps.ToString());

            byte[] pixels = frame.getBGRByteArray();
            this.img = new Bitmap(frame.getWidth(), frame.getHeight(), PixelFormat.Format24bppRgb);
            var        bounds  = new Rectangle(0, 0, frame.getWidth(), frame.getHeight());
            BitmapData bmpData = img.LockBits(bounds, ImageLockMode.WriteOnly, img.PixelFormat);
            IntPtr     ptr     = bmpData.Scan0;

            int data_x    = 0;
            int ptr_x     = 0;
            int row_bytes = frame.getWidth() * 3;

            // GDI+ requires each bitmap row to start on a 4-byte boundary, so the
            // rows are copied one at a time using the bitmap's Stride.
            // http://stackoverflow.com/questions/20743134/converting-opencv-image-to-gdi-bitmap-doesnt-work-depends-on-image-size

            for (int y = 0; y < frame.getHeight(); y++)
            {
                Marshal.Copy(pixels, data_x, ptr + ptr_x, row_bytes);
                data_x += row_bytes;
                ptr_x  += bmpData.Stride;
            }
            img.UnlockBits(bmpData);

            this.faces = faces;
            //rwLock.ReleaseWriterLock();

            this.Invalidate();
            frame.Dispose();
        }
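The row-by-row Marshal.Copy above is needed because GDI+ pads each 24bpp bitmap row out to a 4-byte boundary, so bmpData.Stride can be larger than width * 3. Note also that this.img is reallocated every frame without disposing the previous bitmap. A minimal sketch of the padding rule (an illustration, not part of the sample):

        int rowBytes = frame.getWidth() * 3;  // tightly packed BGR row
        int stride   = (rowBytes + 3) & ~3;   // rounded up to a 4-byte boundary; equals bmpData.Stride for Format24bppRgb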
Example #3
    private void OnMultiSourceFrameArrived(object sender, Perception.Kinect.MultiSourceFrameArrivedEventArgs e)
    {
        if (e.ColorFrame != null)
        {
            //UnityEngine.Debug.LogError("frame received!");
            sw.Stop();
            totalTime += sw.Elapsed.TotalSeconds;
            DataStore.SetValue("Time:per:frame", new DataStore.StringValue((++i / totalTime).ToString()), this, sw.Elapsed.ToString());
            sw.Restart();
            //var closestBodyFrame = new ClosestBodyFrame(e.BodyFrame, engageMin, engageMax);
            //var headColorFrame = new HeadColorFrame(e.ColorFrame, closestBodyFrame);
            //var colorFrame = new Perception.Frames.ColorFrame(e.ColorFrame);

            Bitmap b = cropAtRect(ColorImageFrameToBitmap(e.ColorFrame), rectangle);

            byte[] data       = BitmaptoArray(b);
            var    colorArray = new Color32[data.Length / 4];
            for (var j = 0; j < data.Length; j += 4) // 'j' avoids shadowing the frame-counter field 'i' used above
            {
                var color = new Color32(data[j + 0], data[j + 1], data[j + 2], data[j + 3]);
                colorArray[j / 4] = color;
            }
            //Color32[] data = colorFrame.GetBytes();


            frame = new Affdex.Frame(colorArray, b.Width, b.Height, e.Timestamp);
            if (detector.IsRunning)
            {
                detector.ProcessFrame(frame);
            }
        }
        //else { UnityEngine.Debug.LogError("frame null!"); }
    }
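Two caveats for this handler. ColorImageFrameToBitmap and cropAtRect each allocate a Bitmap that is never disposed, which adds up at frame rate; and depending on how BitmaptoArray orders channels (System.Drawing buffers are typically BGRA), the Color32(r, g, b, a) construction may swap red and blue. A cleanup sketch, assuming the same helper names:

    // Sketch only: dispose the intermediate bitmaps once their pixels are copied out.
    using (Bitmap full = ColorImageFrameToBitmap(e.ColorFrame))
    using (Bitmap b = cropAtRect(full, rectangle))
    {
        byte[] data = BitmaptoArray(b);
        // ... build colorArray and the Affdex.Frame exactly as above ...
    }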
Example #4
 private void DrawData(Affdex.Frame image, Dictionary <int, Affdex.Face> faces)
 {
     try
     {
         if (faces != null)
         {
             var result = this.Dispatcher.BeginInvoke((Action)(() =>
             {
                 if ((Detector != null) && (Detector.isRunning()))
                 {
                     canvas.Faces = faces;
                     canvas.Width = cameraDisplay.ActualWidth;
                     canvas.Height = cameraDisplay.ActualHeight;
                     canvas.XScale = canvas.Width / image.getWidth();
                     canvas.YScale = canvas.Height / image.getHeight();
                     canvas.InvalidateVisual();
                     DrawSkipCount = 0;
                 }
             }));
         }
     }
     catch (Exception ex)
     {
         String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
         ShowExceptionAndShutDown(message);
     }
 }
Example #5
        public void onImageCapture(Affdex.Frame frame)
        {
            #region Use the following code to test whether the frame's captured image can be written out as a valid picture; if it cannot, the pixel array the frame was built from is wrong
            var    len         = frame.getBGRByteArrayLength();
            byte[] imageData   = frame.getBGRByteArray();    // the data captured here differs from the buffer the frame was created from, and is 3-channel
            int    width       = frame.getWidth();
            int    height      = frame.getHeight();
            var    ColorFormat = frame.getColorFormat();
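            // Sanity check: a 3-channel BGR buffer holds exactly width * height * 3 bytes.
            System.Diagnostics.Debug.Assert(len == width * height * 3);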

            if (imageData != null && imageData.Length > 0)
            {
                var _stride  = (width * System.Windows.Media.PixelFormats.Bgr24.BitsPerPixel + 7) / 8; // Bgr24 to match the format passed to Create below (both are 24bpp)
                var imageSrc = System.Windows.Media.Imaging.BitmapSource.Create(width, height, 96d, 96d, System.Windows.Media.PixelFormats.Bgr24,
                                                                                null, imageData, _stride);

                System.Windows.Media.Imaging.BitmapEncoder encoder = new System.Windows.Media.Imaging.PngBitmapEncoder();
                encoder.Frames.Add(System.Windows.Media.Imaging.BitmapFrame.Create(imageSrc));
                using (var stream =
                           new System.IO.FileStream(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory,
                                                                           "我是分析前图片.png"), System.IO.FileMode.Create))
                {
                    encoder.Save(stream);
                }
            }
            #endregion
        }
Example #6
        public void onImageResults(Dictionary <int, Face> faces, Affdex.Frame frame)
        {
            Face face = null;

            if (faces != null && faces.Values != null && faces.Values.Count() > 0)
            {
                face = faces.Values.First(); // our Detector tracks at most one face, so there is at most one entry here
            }
            if (face == null)
            {
                return; // nothing detected in this frame
            }
            // FeaturePoints are in image coordinates: X is horizontal, Y is vertical.
            int left   = (int)face.FeaturePoints.Min(r => r.X);
            int top    = (int)face.FeaturePoints.Min(r => r.Y);
            int right  = (int)face.FeaturePoints.Max(r => r.X);
            int bottom = (int)face.FeaturePoints.Max(r => r.Y);

            ImageHelper.cutPicture(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "timg.jpg"),
                                   left, top, right - left, bottom - top); // assuming cutPicture(path, x, y, width, height)
        }
Example #7
        /// <summary>
        /// Handles the Image capture from source produced by Affdex.Detector
        /// </summary>
        /// <param name="image">The <see cref="Affdex.Frame"/> instance containing the image captured from camera.</param>
        public void onImageCapture(Affdex.Frame frame)
        {
            byte[] imageData = frame.getBGRByteArray();
            int    width     = frame.getWidth();
            int    height    = frame.getHeight();

            try
            {
                if (imageData != null && imageData.Length > 0)
                {
                    var stride   = (width * PixelFormats.Bgr24.BitsPerPixel + 7) / 8;
                    var imageSrc = BitmapSource.Create(width, height, 96d, 96d, PixelFormats.Bgr24, null, imageData, stride);
                    SaveImageToFile(imageSrc, System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, frame.getTimestamp().ToString() + ".png"));
                }
            }
            catch (Exception)
            {
                // Swallow snapshot failures so a single bad frame does not stop capture.
            }
        }
Example #8
        public PhotoWindow()
        {
            InitializeComponent();
            uint maxNumFaces = 1; // maximum number of faces to detect in the image

            Detector = new Affdex.PhotoDetector(maxNumFaces, Affdex.FaceDetectorMode.SMALL_FACES);
            Detector.setImageListener(this);
            Detector.start();

            byte[]       bytes        = FileHelper.FileToBytes(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "timg.jpg"));
            BitmapSource bitmapSource = ImageHelper.BytesToBitmapImage(bytes);
            int w      = bitmapSource.PixelWidth;   // use pixel dimensions; Width/Height are in DIPs and can differ at non-96 DPI
            int h      = bitmapSource.PixelHeight;
            int stride = (w * bitmapSource.Format.BitsPerPixel + 7) / 8; // compute the stride

            byte[] byteList = new byte[h * stride];
            bitmapSource.CopyPixels(byteList, stride, 0);
            Affdex.Frame frame = new Affdex.Frame(w, h, byteList, Affdex.Frame.COLOR_FORMAT.BGRA); // assumes the decoded image is 32bpp BGRA
            Detector.process(frame);
        }
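The constructor starts the detector but nothing in the sample stops it. A minimal cleanup sketch, assuming PhotoWindow is a WPF Window and Detector is the field used above:

        protected override void OnClosed(EventArgs e)
        {
            // Stop the detector so its native resources are released with the window.
            if ((Detector != null) && Detector.isRunning())
            {
                Detector.stop();
            }
            base.OnClosed(e);
        }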
Example #9
        /// <summary>
        /// Draws the image captured from the camera.
        /// </summary>
        /// <param name="image">The image captured.</param>
        private void DrawCapturedImage(Affdex.Frame image)
        {
            // Update the Image control from the UI thread

            try
            {
                //cameraDisplay.Source = rtb;
                pictureBox1.Image = BitmapFromSource(ConstructImage(image.getBGRByteArray(), image.getWidth(), image.getHeight()));

                if (image != null)
                {
                    image.Dispose();
                }
            }
            catch (Exception ex)
            {
                String message = String.IsNullOrEmpty(ex.Message) ? "AffdexMe error encountered." : ex.Message;
                // ShowExceptionAndShutDown(message);
            }
        }
        public PhotoWindowAdvance()
        {
            InitializeComponent();
            uint maxNumFaces = 1; // maximum number of faces to detect in the image

            Detector = new Affdex.PhotoDetector(maxNumFaces, Affdex.FaceDetectorMode.SMALL_FACES);
            //Set location of the classifier data files, needed by the SDK
            Detector.setClassifierPath("C:\\Program Files\\Affectiva\\AffdexSDK\\data");
            //String newPath = System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "data"); // another path works too, but the data folder must be copied there
            //Detector.setClassifierPath(newPath);

            // Toggle whole groups of built-in classifiers (emotions, expressions, emojis, etc.)
            Detector.setDetectAllEmotions(false);
            Detector.setDetectAllExpressions(false);
            Detector.setDetectAllEmojis(true);
            Detector.setDetectGender(true);
            Detector.setDetectGlasses(true);

            // Individual classifiers below
            Detector.setDetectJoy(true);
            Detector.setDetectSadness(true);
            Detector.setDetectAnger(true);
            Detector.setDetectDisgust(true);
            Detector.setDetectSurprise(true);
            Detector.setDetectFear(true);


            Detector.setImageListener(this); // register the two listeners
            Detector.setProcessStatusListener(this);
            Detector.start();


            //Bitmap bmpt = (Bitmap)System.Drawing.Image.FromFile(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "timg.jpg"));
            //bmpt.Save(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "timg_1.png"));

            //==================================================================================================================================
            byte[]       bytes        = FileHelper.FileToBytes(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory, "timg.jpg"));
            BitmapSource bitmapSource = ImageHelper.BytesToBitmapImage(bytes);


            //var imageSrc = bitmapSource;
            //System.Windows.Media.Imaging.BitmapEncoder encoder = new System.Windows.Media.Imaging.PngBitmapEncoder();
            //encoder.Frames.Add(System.Windows.Media.Imaging.BitmapFrame.Create(imageSrc));
            //using (var stream = new System.IO.FileStream(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory,
            //    "timg_new.png"), System.IO.FileMode.Create))
            //{
            //    encoder.Save(stream); // the export succeeds, proving the image was loaded into the BitmapSource correctly
            //}
            //==================================================================================================================================
            int w      = bitmapSource.PixelWidth;   // use pixel dimensions; Width/Height are in DIPs
            int h      = bitmapSource.PixelHeight;
            int stride = (w * bitmapSource.Format.BitsPerPixel + 7) / 8; // compute the stride

            byte[] byteList = new byte[h * stride];
            bitmapSource.CopyPixels(byteList, stride, 0);  // copy the raw pixels
            Affdex.Frame frame = new Affdex.Frame(w, h, byteList, Affdex.Frame.COLOR_FORMAT.BGRA); // assumes a 32bpp BGRA source
            //==================================================================================================================================
            //var len = frame.getBGRByteArrayLength();
            //byte[] imageData = frame.getBGRByteArray();
            //Console.WriteLine($"onImageCapture帧的buf len{imageData.Length} len{len}");
            //int width = frame.getWidth();
            //int height = frame.getHeight();
            //var ColorFormat = frame.getColorFormat();

            //if (imageData != null && imageData.Length > 0)
            //{
            //    var _stride = (width * System.Windows.Media.PixelFormats.Rgb24.BitsPerPixel + 7) / 8; // _stride 1791
            //    Console.WriteLine($"_stride{_stride}");
            //    var imageSrc = System.Windows.Media.Imaging.BitmapSource.Create(width, height, 96d, 96d, System.Windows.Media.PixelFormats.Bgr24,
            //        null, imageData, _stride);

            //    System.Windows.Media.Imaging.BitmapEncoder encoder = new System.Windows.Media.Imaging.PngBitmapEncoder();
            //    encoder.Frames.Add(System.Windows.Media.Imaging.BitmapFrame.Create(imageSrc));

            //    using (var stream =
            //    new System.IO.FileStream(System.IO.Path.Combine(System.AppDomain.CurrentDomain.BaseDirectory,
            //        "我是分析前.png"), System.IO.FileMode.Create))
            //    {
            //        encoder.Save(stream);
            //    }
            //}
            //==================================================================================================================================
            Detector.process(frame);
        }
Example #11
 public void onImageCapture(Affdex.Frame frame)
 {
     frame.Dispose();
 }
Example #12
 /// <summary>
 /// Handles the Image results event produced by Affdex.Detector
 /// </summary>
 /// <param name="faces">The detected faces.</param>
 /// <param name="image">The <see cref="Affdex.Frame"/> instance containing the image analyzed.</param>
 public void onImageResults(Dictionary <int, Face> faces, Affdex.Frame frame)
 {
     if (faces.Count > 0)
     {
         foreach (Face face in faces.Values)
         {
              if (face.Expressions.InnerBrowRaise > 0)
              {
                  Console.WriteLine("inner brow raise " + face.Expressions.InnerBrowRaise);
              }
              if (face.Expressions.BrowRaise > 0)
              {
                  Console.WriteLine("brow raise " + face.Expressions.BrowRaise);
              }
              if (face.Expressions.BrowFurrow > 0)
              {
                  Console.WriteLine("brow furrow " + face.Expressions.BrowFurrow);
              }
              if (face.Expressions.EyeWiden > 0)
              {
                  Console.WriteLine("eye widen " + face.Expressions.EyeWiden);
              }
              if (face.Expressions.CheekRaise > 0)
              {
                  Console.WriteLine("cheek raise " + face.Expressions.CheekRaise);
              }
              if (face.Expressions.LidTighten > 0)
              {
                  Console.WriteLine("lid tighten " + face.Expressions.LidTighten);
              }
              if (face.Expressions.NoseWrinkle > 0)
              {
                  Console.WriteLine("nose wrinkle " + face.Expressions.NoseWrinkle);
              }
              if (face.Expressions.UpperLipRaise > 0)
              {
                  Console.WriteLine("upper lip raise " + face.Expressions.UpperLipRaise);
              }
              if (face.Expressions.Dimpler > 0)
              {
                  Console.WriteLine("dimpler " + face.Expressions.Dimpler);
              }
              if (face.Expressions.LipCornerDepressor > 0)
              {
                  Console.WriteLine("lip corner depressor " + face.Expressions.LipCornerDepressor);
              }
              if (face.Expressions.ChinRaise > 0)
              {
                  Console.WriteLine("chin raise " + face.Expressions.ChinRaise);
              }
              if (face.Expressions.LipPucker > 0)
              {
                  Console.WriteLine("lip pucker " + face.Expressions.LipPucker);
              }
              if (face.Expressions.LipStretch > 0)
              {
                  Console.WriteLine("lip stretch " + face.Expressions.LipStretch);
              }
              if (face.Expressions.LipPress > 0)
              {
                  Console.WriteLine("lip press " + face.Expressions.LipPress);
              }
              if (face.Expressions.JawDrop > 0)
              {
                  Console.WriteLine("jaw drop " + face.Expressions.JawDrop);
              }
              if (face.Expressions.MouthOpen > 0)
              {
                  Console.WriteLine("mouth open " + face.Expressions.MouthOpen);
              }
              if (face.Expressions.LipSuck > 0)
              {
                  Console.WriteLine("lip suck " + face.Expressions.LipSuck);
              }
              if (face.Expressions.EyeClosure > 0)
              {
                  Console.WriteLine("eye closure " + face.Expressions.EyeClosure);
              }
              //Emotions
              if (face.Emotions.Joy > 0)
              {
                  Console.WriteLine("joy " + face.Emotions.Joy);
              }
              if (face.Emotions.Sadness > 0)
              {
                  Console.WriteLine("sadness " + face.Emotions.Sadness);
              }
              if (face.Emotions.Anger > 0)
              {
                  Console.WriteLine("anger " + face.Emotions.Anger);
              }
              if (face.Emotions.Surprise > 0)
              {
                  Console.WriteLine("surprise " + face.Emotions.Surprise);
              }
              if (face.Emotions.Fear > 0)
              {
                  Console.WriteLine("fear " + face.Emotions.Fear);
              }
              if (face.Emotions.Disgust > 0)
              {
                  Console.WriteLine("disgust " + face.Emotions.Disgust);
              }
              if (face.Emotions.Contempt > 0)
              {
                  Console.WriteLine("contempt " + face.Emotions.Contempt);
              }
              if (face.Emotions.Valence > 0)
              {
                  Console.WriteLine("valence " + face.Emotions.Valence);
              }
              if (face.Emotions.Engagement > 0)
              {
                  Console.WriteLine("engagement " + face.Emotions.Engagement);
              }
         }
         Console.WriteLine($"faces.Count= { faces.Count }");
     }
 }
Example #13
        /// <summary>
        /// Single frame processing
        /// </summary>
        /// <param name="frame">Frame to process</param>
        public void ProcessFrame(Frame frame)
        {
            if (!IsRunning) {
                return;
            }

            int bytesPerPixel = 3;

            byte[] bytes = new byte[frame.rgba.Length * bytesPerPixel];

            // int stride = frame.w * bytesPerPixel;

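            // The loop below flips the image vertically (row frame.h - y - 1)
            // and repacks the RGBA pixels as tightly packed BGR bytes.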
            for (int y = 0; y < frame.h; y++) {
                for (int x = 0; x < frame.w; x++) {

                    int frameByteIndex = (y * (frame.w)) + x;
                    int idx = ((frame.h - y - 1) * (frame.w * bytesPerPixel)) + (x * bytesPerPixel);

                    bytes [idx] = frame.rgba [frameByteIndex].b;
                    bytes [idx + 1] = frame.rgba [frameByteIndex].g;
                    bytes [idx + 2] = frame.rgba [frameByteIndex].r;
                }

            }

            //Debug only saving of the image to a tmp file!
            //SampleImage(bytes, frame.w, frame.h);

            nativePlatform.ProcessFrame (bytes, frame.w, frame.h, frame.timestamp);
        }
Example #14
        private void ProcessFrame()
        {
            #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
            if (this.movie != null)
            {

                //A render texture is required to copy the pixels from the movie clip
                RenderTexture rt = RenderTexture.GetTemporary(movie.width, movie.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default, 1);

                RenderTexture.active = rt;

                //Copy the movie texture to the render texture
                Graphics.Blit(movie, rt);

                //Read the render texture to our temporary texture
                t2d.ReadPixels(new Rect(0, 0, rt.width, rt.height), 0, 0);

                //apply the bytes
                t2d.Apply();

                //Send to the detector
                Frame frame = new Frame(t2d.GetPixels32(), t2d.width, t2d.height, Time.realtimeSinceStartup);
                detector.ProcessFrame(frame);
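                //Restore the active render texture before releasing the temporary one
                RenderTexture.active = null;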

                RenderTexture.ReleaseTemporary(rt);
            }
            #endif
        }
Example #15
        public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame frame)
        {
            //if (ready)
            {
                System.Console.WriteLine("faces count");
                System.Console.WriteLine(faces.Count);

                //inputfile = detector.getName();

                foreach (KeyValuePair <int, Affdex.Face> pair in faces)
                {
                    Affdex.Face face = pair.Value;

                    //adding values to the arraylist
                    f.Add(face.Emotions.Fear);
                    a.Add(face.Emotions.Anger);
                    h.Add(face.Emotions.Joy);
                    d.Add(face.Emotions.Disgust);
                    sa.Add(face.Emotions.Sadness);
                    su.Add(face.Emotions.Surprise);


                    float[] emo = new float[6];
                    emo[0] = face.Emotions.Fear;
                    emo[1] = face.Emotions.Anger;
                    emo[2] = face.Emotions.Surprise;
                    emo[3] = face.Emotions.Joy;
                    emo[4] = face.Emotions.Sadness;
                    emo[5] = face.Emotions.Disgust;

                    progressBar1.Value = (int)face.Emotions.Anger;
                    progressBar2.Value = (int)face.Emotions.Fear;
                    progressBar4.Value = (int)face.Emotions.Surprise;
                    progressBar5.Value = (int)face.Emotions.Joy;
                    progressBar6.Value = (int)face.Emotions.Sadness;
                    progressBar7.Value = (int)face.Emotions.Disgust;



                    float engagement = face.Emotions.Engagement;

                    float dominantEmotion = emo.Max();
                    int   index           = emo.ToList().IndexOf(dominantEmotion);
                    if ((index == 0) && (emo[index] > 10))
                    {
                        filepath          = @"C:\Users\Rana\Desktop\data\Fear\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\fear.png");
                        label3.Text       = "Afraid";
                    }
                    else if ((index == 1) && (emo[index] > 10))
                    {
                        filepath          = @"C:\Users\Rana\Desktop\data\Anger\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\angry.png");
                        label3.Text       = "Angry";
                    }
                    else if ((index == 2) && (emo[index] > 10))
                    {
                        System.Console.WriteLine(inputfile.Substring(28, 4));
                        filepath          = @"C:\Users\Rana\Desktop\data\Surprise\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\surprise.png");
                        label3.Text       = "Surprised";
                    }
                    else if ((index == 3) && (emo[index] > 10))
                    {
                        filepath          = @"C:\Users\Rana\Desktop\data\Joy\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\happy.png");
                        label3.Text       = "Happy";
                    }
                    else if ((index == 4) && (emo[index] > 10))
                    {
                        filepath          = @"C:\Users\Rana\Desktop\data\Sadness\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\sad.png");
                        label3.Text       = "Sad";
                    }
                    else if ((index == 5) && (emo[index] > 10))
                    {
                        filepath          = @"C:\Users\Rana\Desktop\data\Disgust\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\disgust.png");
                        label3.Text       = "Disgusted";
                    }
                    else
                    {
                        System.Console.WriteLine(inputfile);
                        filepath          = @"C:\Users\Rana\Desktop\data\Neutral\" + inputfile.Substring(28, 8) + ".mp3";
                        pictureBox2.Image = Image.FromFile("C:\\Users\\Rana\\Documents\\Visual Studio 2015\\Projects\\proj4 - Copy\\proj2\\neutral.png");
                        label3.Text       = "Neutral";
                    }

                    if (faces != null)
                    {
                        foreach (PropertyInfo prop in typeof(Affdex.Emotions).GetProperties())
                        {
                            float  value  = (float)prop.GetValue(face.Emotions, null);
                            string output = string.Format("{0}:{1:N2}", prop.Name, value);
                            System.Console.WriteLine(output);
                        }
                    }
                }
                frame.Dispose();
            }
        }
Example #16
 public void onImageCapture(Affdex.Frame frame)
 {
     DrawCapturedImage(frame);
     frame.Dispose();
 }
Example #17
 /// <summary>
 /// Sample an individual frame from the webcam and send to detector for processing.
 /// </summary>
 public void ProcessFrame()
 {
     #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
      if (webcamTexture != null && detector.IsRunning && webcamTexture.isPlaying)
      {
          Frame frame = new Frame(webcamTexture.GetPixels32(), webcamTexture.width, webcamTexture.height, Time.realtimeSinceStartup);
          detector.ProcessFrame(frame);
      }
     #endif
 }
Example #18
 public void onImageCapture(Affdex.Frame image)
 {
     DrawCapturedImage(image);
 }
Example #19
 public void onImageResults(Dictionary <int, Affdex.Face> faces, Affdex.Frame image)
 {
     DrawData(image, faces);
 }