/// <summary>
/// Handles the Upload button: lets the user pick one or more image files, runs the
/// emotion-detection methodology selected in <c>Methodology</c> on the first image,
/// and shows the resulting annotated image in <c>imageBox1</c>.
/// </summary>
/// <param name="sender">The Upload button that raised the event.</param>
/// <param name="e">Standard event arguments (unused).</param>
private void Upload_Click(object sender, EventArgs e)
{
    UploadImage.Multiselect = true;
    if (UploadImage.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // Open the file only to verify it is readable, and dispose the stream
    // immediately — the original code leaked this handle (never closed it,
    // never read from it).
    using (Stream fileStream = UploadImage.OpenFile())
    {
        if (fileStream == null)
        {
            return;
        }
    }

    FileInfo file = new FileInfo(UploadImage.FileName);
    Mat imageMat = new Mat(file.FullName);
    EmotionBusinessModel mod = new EmotionBusinessModel();

    if (Methodology.SelectedIndex == 1)
    {
        // Distance + wrinkle based detection needs the detected face region as well.
        mod = EDRIPMainDistanceAndWrinkledBased.getInstance()
            .DetectEmotionsDistanceAndWrinkledBased(new Mat[] { imageMat }, FDIPMAin.DetectFace(imageMat));
    }
    else if (Methodology.SelectedIndex == 2)
    {
        mod = EDRIPMainReducedDimensionsBased.getInstance()
            .DetectEmotionsReducedDimentionsBased(new Mat[] { imageMat });
    }
    else if (Methodology.SelectedIndex == 3)
    {
        // Optical-flow renders into its own control; it produces no business model.
        OpticalflowView(UploadImage);
        OpticalFlow.Visible = true;
    }
    else if (Methodology.SelectedIndex == 4)
    {
        EDRIPHybridMethodologyBasedMain.getInstance().testSystemForEmotionHybrid(UploadImage.FileNames);
    }

    CvInvoke.Resize(imageMat, imageMat, new Size(320, 240), 0, 0, Inter.Linear);

    Logger log = new Logger();
    log.LogIntoFile(Log.logType.INFO, "message to be written");

    // Methodologies 3/4 (and any unhandled index) leave 'mod' empty; the
    // original code then crashed dereferencing mod.images[0]. Only update
    // the preview when a detection result image actually exists.
    // NOTE(review): assumes mod.images is a List — confirm against EmotionBusinessModel.
    if (mod.images != null && mod.images.Count > 0)
    {
        imageBox1.Image = mod.images[0].image.ToImage<Bgr, byte>();
    }
}