// Constructor: wires an already-created Affdex detector and its frame rate
// into the hosted drawing panel (panel1 is a custom control exposing
// setDetector/setFPS). The detector itself is started by the caller.
public ProcessVideo(Affdex.Detector detector, double FPS)
{
    InitializeComponent();
    this.panel1.setDetector(detector);
    this.panel1.setFPS(FPS);
}
// Stores the detector and registers this object for its callbacks:
// image listener (per-frame results) and process-status listener
// (success/failure notifications).
public void setDetector(Affdex.Detector detector)
{
    this.detector = detector;
    detector.setImageListener(this);
    detector.setProcessStatusListener(this);
}
// Constructor: subscribes this surface to detector callbacks and configures
// the control for flicker-free owner drawing (double buffering + all painting
// in WM_PAINT).
public AffectivaSurface(Affdex.Detector detector)
{
    detector.setImageListener(this);
    detector.setProcessStatusListener(this);
    //InitializeComponent();
    // NOTE(review): InitializeComponent is intentionally(?) skipped — confirm
    // this control has no designer-generated children that need it.
    // rwLock guards data shared between the detector callback thread and the
    // UI paint thread.
    rwLock = new ReaderWriterLock();
    this.DoubleBuffered = true;
    //this.FormBorderStyle = FormBorderStyle.FixedSingle;
    SetStyle(ControlStyles.AllPaintingInWmPaint |
             ControlStyles.UserPaint |
             ControlStyles.OptimizedDoubleBuffer |
             ControlStyles.ResizeRedraw, true);
}
/// <summary>
/// Opens the default webcam through an Affdex CameraDetector, shows the
/// ProcessVideo form until the user closes it, and optionally records the
/// annotated feed to an .avi file next to the user's Documents folder.
/// </summary>
/// <param name="trainingDataFolder">Path to the Affdex classifier data folder.</param>
/// <param name="record">True to write the processed video to disk.</param>
static void processCamera(String trainingDataFolder, bool record)
{
    Affdex.Detector detector = null;
    try
    {
        System.Console.WriteLine("Trying to process a camera feed...");
        double FPS = 30.0D;
        uint faceNo = 1;
        Affdex.FaceDetectorMode faceLarge = Affdex.FaceDetectorMode.LARGE_FACES;
        // Camera id 0, capture and process at the same rate.
        detector = new Affdex.CameraDetector(0, FPS, FPS, faceNo, faceLarge);

        // Force "." as the decimal separator so numeric output is locale-stable.
        System.Globalization.CultureInfo customCulture =
            (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone();
        customCulture.NumberFormat.NumberDecimalSeparator = ".";
        System.Threading.Thread.CurrentThread.CurrentCulture = customCulture;

        string pV = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
        if (Environment.OSVersion.Version.Major >= 6)
        {
            // Vista and later: use the parent of MyDocuments (profile root).
            pV = Directory.GetParent(pV).ToString();
        }
        // 24-hour "HH" keeps file names unambiguous and sortable.
        string Fname = Path.Combine(pV, "video_" + DateTime.Now.ToString("yyyyMMdd_HHmmss") + ".avi");

        ProcessVideo videoForm = new ProcessVideo(detector, FPS);
        if (record)
        {
            videoForm.setOutputVideoFile(Fname);
        }

        detector.setClassifierPath(trainingDataFolder);
        detector.setDetectAllEmotions(true);
        detector.setDetectAllExpressions(true);
        detector.setDetectAllEmojis(true);
        detector.setDetectAllAppearances(true);
        detector.start();
        System.Console.WriteLine("Face detector mode = " + detector.getFaceDetectorMode().ToString());

        videoForm.ShowDialog();
        videoForm.Dispose();
    }
    catch (Exception ex)
    {
        Console.WriteLine("ERROR: " + ex.Message);
    }
    finally
    {
        // FIX: the detector was previously never stopped or disposed (the
        // cleanup calls were commented out), leaking the native camera handle.
        // Mirrors the cleanup performed in processVideo().
        if (detector != null)
        {
            if (detector.isRunning())
            {
                detector.stop();
            }
            detector.Dispose();
        }
    }
}
// Constructor: takes over the detector's image and process-status callbacks
// and configures the form for flicker-free custom painting of incoming frames.
public ProcessVideo(Affdex.Detector detector)
{
    System.Console.WriteLine("Starting Interface...");
    this.detector = detector;
    detector.setImageListener(this);
    detector.setProcessStatusListener(this);
    InitializeComponent();
    // Guards frame data shared between the detector callback thread and the
    // UI paint thread.
    rwLock = new ReaderWriterLock();
    this.DoubleBuffered = true;
    this.FormBorderStyle = FormBorderStyle.FixedSingle;
    SetStyle(ControlStyles.AllPaintingInWmPaint |
             ControlStyles.UserPaint |
             ControlStyles.OptimizedDoubleBuffer |
             ControlStyles.ResizeRedraw, true);
}
// Constructor: hosts an AffectivaSurface inside panel1, right-aligns the
// emotion read-out box, subscribes to result updates, and starts the UI timers.
public MainForm(Affdex.Detector detector)
{
    InitializeComponent();
    affSurface = new AffectivaSurface(detector);
    this.panel1.Controls.Add(affSurface);
    // Right-align all existing text, then clear the selection.
    this.EmotionsTextBox.SelectAll();
    this.EmotionsTextBox.SelectionAlignment = HorizontalAlignment.Right;
    this.EmotionsTextBox.Select(0, 0);
    affSurface.ResultsChanged += new System.EventHandler(this.affSurf_ResultsChanged);
    // NOTE(review): size 5 appears to be assumed by the read-out code — confirm.
    this.textResults_sorted_values = new float[5];
    // Start Timers
    TextTimer.Start();
    secondsTimer.Start();
}
// Constructor: takes over the detector's image and process-status callbacks
// and configures the form for flicker-free custom painting of incoming frames.
public ProcessVideo(Affdex.Detector detector)
{
    System.Console.WriteLine("Starting Interface...");
    this.detector = detector;
    detector.setImageListener(this);
    detector.setProcessStatusListener(this);
    InitializeComponent();
    // Guards frame data shared between the detector callback thread and the
    // UI paint thread.
    rwLock = new ReaderWriterLock();
    this.DoubleBuffered = true;
    this.FormBorderStyle = FormBorderStyle.FixedSingle;
    SetStyle(ControlStyles.AllPaintingInWmPaint |
             ControlStyles.UserPaint |
             ControlStyles.OptimizedDoubleBuffer |
             ControlStyles.ResizeRedraw, true);
    //Line added by Hifza
    //Affdex log contains full log from last session of emotion game
    //Affdex results are sent to emotion game code to process data in chunks
    //System.IO.File.WriteAllText(@"C:\Users\artmed\Desktop\sangwonlee\Affdex_Outputs\Affdex_Log.txt", String.Empty);
    //System.IO.File.WriteAllText(@"C:\Users\artmed\Documents\sangwonlee\Affdex_Outputs\Affdex_Results.txt", String.Empty);
}
// Creates a CameraDetector on camera id 1 at 15 FPS limited to one face,
// points it at the default SDK classifier data folder, enables emotion and
// gender detection, and starts the asynchronous capture/analysis loop.
// Initialization failures are shown to the user in a message box.
private void startWebCamProsessing()
{
    try
    {
        const int cameraId = 1;   // second attached camera — TODO confirm intended
        const int numberOfFaces = 1;
        const int cameraFPS = 15;     // capture rate
        const int processFPS = 15;    // analysis rate (same as capture)
        Detector = new Affdex.CameraDetector(cameraId, cameraFPS, processFPS,
                                             numberOfFaces, Affdex.FaceDetectorMode.LARGE_FACES);
        // NOTE(review): hard-coded install path; breaks on non-default installs.
        Detector.setClassifierPath("C:\\Program Files\\Affectiva\\AffdexSDK\\data");
        Detector.setDetectAllEmotions(true);
        Detector.setDetectGender(true);
        Detector.setImageListener(this);
        Detector.start();
    }
    catch (Affdex.AffdexException ex)
    {
        // Spanish: "An error occurred while initializing the analyzer".
        MessageBox.Show("Ocurrió un error al inicializar el analizador: " + ex.Message);
    }
}
// (Removed: long commented-out LoadFrameFromFile experiment — recover from VCS
// history if needed.)

/// <summary>
/// Re-creates the Affdex detector according to the currently selected UI mode
/// (0 = camera, 1 = photo, 2 = video), configures emotion-only detection,
/// starts it, and opens a MainForm bound to the new detector.
/// No-op until the UI has finished initializing.
/// </summary>
private void restart_mode()
{
    if (!initialized)
    {
        return; // UI not fully built yet; ignore early change events.
    }

    Affdex.FaceDetectorMode FaceDeteMode = largeFacesCheckBox.Checked
        ? Affdex.FaceDetectorMode.LARGE_FACES
        : Affdex.FaceDetectorMode.SMALL_FACES;

    if (modeChecListkbox.SelectedIndex == 0)
    {
        detector = new Affdex.CameraDetector((int)camIDNumericUpDown.Value,
                                             (double)camFPSNumericUpDown.Value,
                                             (double)camFPSNumericUpDown.Value,
                                             (uint)numFacesNumericUpDown.Value,
                                             FaceDeteMode);
    }
    else if (modeChecListkbox.SelectedIndex == 1)
    {
        detector = new Affdex.PhotoDetector((uint)numFacesNumericUpDown.Value, FaceDeteMode);
    }
    else if (modeChecListkbox.SelectedIndex == 2)
    {
        detector = new Affdex.VideoDetector(30, (uint)numFacesNumericUpDown.Value, FaceDeteMode);
    }

    // FIX: with no selection (SelectedIndex == -1) the field could still be
    // null (or hold a stale detector) and the calls below threw
    // NullReferenceException on first use.
    if (detector == null)
    {
        return;
    }

    // Classifier data ships next to the executable.
    detector.setClassifierPath(Path.Combine(Directory.GetCurrentDirectory(), "affectiva_SDK_data"));
    detector.setDetectAllEmotions(true);
    detector.setDetectAllExpressions(false);
    detector.setDetectAllEmojis(false);
    detector.setDetectAllAppearances(false);
    detector.start();

    MainForm affForm = new MainForm(detector);
    affForm.Show();
}
// Window teardown: releases Kinect frame readers before closing the sensor,
// silences the speech pipeline, and stops/disposes the Affdex detector.
// Statement order is deliberate — readers first, then the sensor they read from.
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (_colorReader != null)
    {
        _colorReader.Dispose();
    }
    if (_bodyReader != null)
    {
        _bodyReader.Dispose();
    }
    if (_sensor != null)
    {
        this._sensor.Close();
        this._sensor = null;
    }
    if (this.convertStream != null)
    {
        // Stop feeding audio into the speech recognizer.
        this.convertStream.SpeechActive = false;
    }
    if (this.speechEngine != null)
    {
        // Unhook handlers before stopping so no callbacks fire mid-teardown.
        this.speechEngine.SpeechRecognized -= this.SpeechEngine_SpeechRecognized;
        this.speechEngine.SpeechRecognitionRejected -= this.SpeechEngine_SpeechRecognitionRejected;
        this.speechEngine.RecognizeAsyncStop();
    }
    if ((Detector != null) && (Detector.isRunning()))
    {
        Detector.stop();
        Detector.Dispose();
        Detector = null;
    }
}
// Constructor: registers this form as the detector's image listener so frame
// callbacks arrive here, then builds the designer-generated UI.
public Form1(Affdex.Detector detector)
{
    detector.setImageListener(this);
    InitializeComponent();
}
/// <summary>
/// Entry point: parses command-line options, constructs the matching Affdex
/// detector (camera feed, single photo, or video file), runs it behind the
/// ProcessVideo dialog, and shuts everything down when the dialog closes.
/// </summary>
static void Main(string[] args)
{
    CmdOptions options = new CmdOptions();
    if (CommandLine.Parser.Default.ParseArguments(args, options))
    {
        Affdex.Detector detector = null;
        List<string> imgExts = new List<string> { ".bmp", ".jpg", ".gif", ".png", ".jpe" };

        bool isCamera = (options.Input.ToLower() == "camera");
        // FIX: classify by the real file extension; the old substring test
        // (Input.Contains(".jpg")) misfired on paths that merely contained an
        // extension-like fragment anywhere in them.
        string inputExt = System.IO.Path.GetExtension(options.Input);
        bool isImage = !isCamera &&
            imgExts.Any(s => string.Equals(s, inputExt, StringComparison.OrdinalIgnoreCase));
        // Anything that is neither camera nor image is attempted as video —
        // this preserves the original fall-through behavior (the original
        // "File-Type not supported" branch was unreachable).
        bool isVideo = (!isImage && !isCamera);

        if (isCamera)
        {
            System.Console.WriteLine("Trying to process a camera feed...");
            detector = new Affdex.CameraDetector(0, 30, 30, (uint)options.numFaces,
                                                 (Affdex.FaceDetectorMode)options.faceMode);
        }
        else if (isImage)
        {
            System.Console.WriteLine("Trying to process a bitmap image..." + options.Input.ToString());
            detector = new Affdex.PhotoDetector((uint)options.numFaces,
                                                (Affdex.FaceDetectorMode)options.faceMode);
        }
        else // isVideo
        {
            System.Console.WriteLine("Trying to process a video file..." + options.Input.ToString());
            detector = new Affdex.VideoDetector(15, (uint)options.numFaces,
                                                (Affdex.FaceDetectorMode)options.faceMode);
        }

        if (detector != null)
        {
            ProcessVideo videoForm = new ProcessVideo(detector);
            detector.setClassifierPath(options.DataFolder);
            detector.setDetectAllEmotions(true);
            detector.setDetectAllExpressions(true);
            detector.setDetectAllEmojis(true);
            detector.setDetectAllAppearances(true);
            detector.start();
            System.Console.WriteLine("Face detector mode = " + detector.getFaceDetectorMode().ToString());

            if (isVideo)
            {
                ((Affdex.VideoDetector)detector).process(options.Input);
            }
            else if (isImage)
            {
                ((Affdex.PhotoDetector)detector).process(LoadFrameFromFile(options.Input));
            }

            videoForm.ShowDialog();
            videoForm.Dispose();   // FIX: dialog was never disposed.
            detector.stop();
            detector.Dispose();    // FIX: native detector was never disposed.

            //Hifza
            //close socket after data transfer is complete
            myNetworks.myNetwork.CloseClient();
        }
    }
}
/// <summary>
/// Processes a video file (or a single image) with an Affdex detector while
/// logging blink / eye-aspect-rate values to a timestamped text file placed
/// two levels above the roaming AppData folder.
/// </summary>
/// <param name="pVideo">Input path, used to decide image-vs-video handling.</param>
/// <param name="options">Parsed command-line options (input, data folder, faces, mode).</param>
static void processVideo(String pVideo, CmdOptions options)
{
    Affdex.Detector detector = null;
    StreamWriter outputFile = null;
    try
    {
        List<string> imgExts = new List<string> { ".bmp", ".jpg", ".gif", ".png", ".jpe" };
        // FIX: compare the real extension instead of substring-matching the
        // whole path, which produced false "image" classifications.
        string ext = Path.GetExtension(pVideo);
        bool isImage = imgExts.Any(s => string.Equals(s, ext, StringComparison.OrdinalIgnoreCase));

        if (isImage)
        {
            System.Console.WriteLine("Trying to process a bitmap image..." + options.Input.ToString());
            detector = new Affdex.PhotoDetector((uint)options.numFaces,
                                                (Affdex.FaceDetectorMode)options.faceMode);
        }
        else
        {
            System.Console.WriteLine("Trying to process a video file..." + options.Input.ToString());
            detector = new Affdex.VideoDetector(60F, (uint)options.numFaces,
                                                (Affdex.FaceDetectorMode)options.faceMode);
        }

        // Force "." as the decimal separator so logged numbers parse consistently.
        System.Globalization.CultureInfo customCulture =
            (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone();
        customCulture.NumberFormat.NumberDecimalSeparator = ".";
        System.Threading.Thread.CurrentThread.CurrentCulture = customCulture;

        string pV = Directory.GetParent(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData)).FullName;
        if (Environment.OSVersion.Version.Major >= 6)
        {
            pV = Directory.GetParent(pV).ToString();
        }

        // FIX: "HH" (24-hour) instead of "hh" (12-hour) — the old pattern
        // produced the same file name for AM and PM runs, risking collisions.
        // All sibling code already uses the 24-hour form.
        string Fname = Path.Combine(pV, "blinkValues_" + DateTime.Now.ToString("yyMMdd_HHmmss") + ".txt");
        string fileHeader = "Blink\tEye-Aspect-Rate\tframe";
        System.Console.WriteLine(fileHeader);
        outputFile = new StreamWriter(Fname);
        outputFile.WriteLine(fileHeader);

        ProcessVideo videoForm = new ProcessVideo(detector, 15);
        videoForm.setOutputVideoFileLog(outputFile);

        detector.setClassifierPath(options.DataFolder);
        detector.setDetectAllEmotions(true);
        detector.setDetectAllExpressions(true);
        detector.setDetectAllEmojis(true);
        detector.setDetectAllAppearances(true);
        detector.start();
        System.Console.WriteLine("Face detector mode = " + detector.getFaceDetectorMode().ToString());

        if (isImage)
        {
            Affdex.Frame img = LoadFrameFromFile(options.Input);
            ((Affdex.PhotoDetector)detector).process(img);
        }
        else
        {
            ((Affdex.VideoDetector)detector).process(options.Input);
        }

        videoForm.ShowDialog();
        videoForm.Dispose();
    }
    catch (Exception ex)
    {
        Console.WriteLine("ERROR: " + ex.Message);
    }
    finally
    {
        // FIX: release the log file and detector even when an exception is
        // thrown mid-setup (previously both leaked on failure).
        if (outputFile != null)
        {
            outputFile.Close();
        }
        if (detector != null)
        {
            if (detector.isRunning())
            {
                detector.stop();
            }
            detector.Dispose();
        }
    }
}