/// <summary>
/// Initializes the UI and loads all of the recording's data into RAM.
/// </summary>
/// <param name="fileBaseName">Since there are two files for each recording (landmark- and
/// video-data), the shared base name (a timestamp) is used to identify both of them.</param>
/// <param name="type">Specifies the emotion-type; used as the extension of the data file.</param>
/// <param name="idx">Assigns an index to the DataSet.</param>
public DataSetView(string fileBaseName, string type, int idx)
{
    initModules();
    index = idx;
    videoFile = fileBaseName + ".mp4";
    dataFile = fileBaseName + "." + type;
    // BUGFIX: the original Substring(LastIndexOf("\\")) kept the leading backslash in
    // shortName and threw ArgumentOutOfRangeException when the name had no separator.
    int sep = fileBaseName.LastIndexOf('\\');
    shortName = sep >= 0 ? fileBaseName.Substring(sep + 1) : fileBaseName;
    faceRecording = FaceRecording.load(dataFile);

    vlcControl = new Vlc.DotNet.Forms.VlcControl();
    vlcControl.VlcLibDirectoryNeeded += new EventHandler<Vlc.DotNet.Forms.VlcLibDirectoryNeededEventArgs>(this.OnVlcControlNeedLibDirectory);
    ((System.ComponentModel.ISupportInitialize)(this.vlcControl)).EndInit();
    vlcControl.ImeMode = System.Windows.Forms.ImeMode.NoControl;
    vlcControl.Location = new System.Drawing.Point(0, 0);
    arrange();
    vlcControl.VlcMediaplayerOptions = null;

    // Back the data picture box with a bitmap sized to its current bounds; the
    // pictureUpdate delegate re-binds and repaints it (marshalled via Invoke elsewhere).
    dataImage = new Bitmap(dataPictureBox.Bounds.Width, dataPictureBox.Bounds.Height);
    dataPictureBox.Image = dataImage;
    pictureUpdate = delegate
    {
        dataPictureBox.Image = dataImage;
        dataPictureBox.Refresh();
    };
    Controls.Add(vlcControl);
    Controls.Add(dataPictureBox);

    vlcControl.SetMedia(new Uri(videoFile), null);
    // Play then immediately pause so the first video frame is decoded and shown.
    vlcControl.Play();
    vlcControl.Pause();
    model.NullFace = faceRecording.getNullFace();
    arrange();
}
/// <summary>
/// Updates the modules and their respective data and refreshes the UI to display the
/// current frame (the ActionUnit- and Emotion-Data).
/// NOTE(review): the method name keeps its historical typo ("udpate") so existing
/// callers keep compiling; rename in a coordinated refactor if desired.
/// </summary>
public void udpateAndVisualizeData()
{
    if (vlcControl.IsPlaying)
    {
        vlcControl.Pause();
    }
    model.CurrentFace = faceRecording.getFace(currentFrame);
    model.NullFace = faceRecording.getNullFace();
    RSModule.Init(model);
    foreach (RSModule rsm in microExpressionModules)
    {
        rsm.Work(null);
    }
    foreach (RSModule rsm in emotionModules)
    {
        rsm.Work(null);
    }

    // Mirror the seven computed emotion intensities into the ring-monitor widgets.
    monitors[0].currentValue = (int)model.Emotions[Model.Emotion.ANGER];
    monitors[1].currentValue = (int)model.Emotions[Model.Emotion.JOY];
    monitors[2].currentValue = (int)model.Emotions[Model.Emotion.FEAR];
    monitors[3].currentValue = (int)model.Emotions[Model.Emotion.CONTEMPT];
    monitors[4].currentValue = (int)model.Emotions[Model.Emotion.SADNESS];
    monitors[5].currentValue = (int)model.Emotions[Model.Emotion.DISGUST];
    monitors[6].currentValue = (int)model.Emotions[Model.Emotion.SURPRISE];

    // BUGFIX: Graphics and Pen are GDI+ handles; the original never disposed either,
    // leaking a handle pair on every frame refresh. Both are now scoped by `using`.
    using (Graphics g = Graphics.FromImage(dataImage))
    using (Pen linePen = new Pen(textBrush))
    {
        g.Clear(System.Drawing.SystemColors.MenuBar);
        g.TextRenderingHint = TextRenderingHint.AntiAlias;
        foreach (FriggnAweseomeGraphix.MEMonitor monitor in monitors)
        {
            FriggnAweseomeGraphix.DrawMEMontior(g, monitor, false);
        }

        int yPos = (int)(gap * 1.5), yPos2 = yPos;
        int idx = 0;
        if (viewHeight != VIEW_TINY)
        {
            // Lay out the ActionUnit values in one column; overflow past the 8th
            // entry wraps into a second column unless the view is large.
            foreach (KeyValuePair<string, double> entry in model.AU_Values)
            {
                int xBase, yBase;
                if (idx++ > 7 && viewHeight != VIEW_LARGE)
                {
                    yBase = yPos2;
                    xBase = monitors[0].radius * 4 + gap + 420;
                    yPos2 += gap;
                }
                else
                {
                    yBase = yPos;
                    yPos += gap;
                    xBase = monitors[0].radius * 4 + gap + 100;
                }
                g.DrawString(entry.Key.Substring(entry.Key.IndexOf(".") + 1), textFont, textBrush, xBase, yBase - 5);
                g.DrawString((int)entry.Value + "", textFont, textBrush, xBase + 250, yBase - 5);
            }
        }
        g.DrawString(shortName, textFont, textBrush, monitors[0].radius * 8 + gap, 0);
        g.DrawString("Frame: " + currentFrame, textFont, textBrush,
            monitors[0].radius * 8 + gap + 250, 0);
        g.DrawLine(linePen, 0, Height - 1, Width, Height - 1);
    }
    // Marshal the repaint onto the UI thread.
    Invoke(pictureUpdate);
}