public void write()
{
    int codec = Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1');
    int fps = 25;
    if (list_timestamps.Count > 0)
    {
        // Read the frame rate from the first clip so the joined video plays at the right speed
        String tempvideopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[0].ToString() + ".mpg";
        Capture tempcapture = new Capture(tempvideopath);
        fps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
        tempcapture.Dispose();
    }
    VideoWriter videowriter = new VideoWriter(videopath, codec, fps, 640, 480, true);
    for (int i = 0; i < list_timestamps.Count; i++)
    {
        videopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[i].ToString() + ".mpg";
        try
        {
            Capture joincapture = new Capture(videopath);
            Image<Bgr, byte> frame = joincapture.QueryFrame();
            // Skip the first frames of each clip
            for (int n = 1; n < 15; n++)
                joincapture.QueryFrame();
            while (frame != null)
            {
                videowriter.WriteFrame(frame);
                frame = joincapture.QueryFrame();
            }
            joincapture.Dispose();

            // Notify main frame to update its progressbar
            ExportVideoProgressEventArgs e = new ExportVideoProgressEventArgs(i);
            DoneAppendingRallyVideoEvent(this, e);
        }
        catch (NullReferenceException)
        {
            Console.WriteLine("unreadable video file");
        }
    }
    videowriter.Dispose();
}
public void processarVideo(ParametrosDinamicos parametros)
{
    mCapture = new Capture(mNomeDoArquivo);
    inicializarVariaveis();
    carregarParametrosNaTela(parametros);
    while (mImagemColorida != null)
    {
        atualizarParametros(parametros);
        mContadorDeFrames++;
        processarImagem(false);
        CvInvoke.WaitKey(100);
        // CvInvoke.cvShowImage("Imagem", mImagemColorida);
        desenharNaImagem(parametros);
        exibirImagem(false);
        if (mSalvarImagem)
        {
            /*CvInvoke.SaveImage(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames), mImagemColorida);
            EnviarImagensEmail(new Attachment(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames)));
            mSalvarImagem = false;*/
        }
        mImagemColorida = mCapture.QueryFrame();
    }
    mCapture.Dispose();
}
public RecognitionOnPrem()
{
    InitializeComponent();
    Loaded += (s, e) =>
    {
        if (grabber == null)
        {
            _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
            //count number of trained faces
            ContTrain = CommonData.TrainingImages.Count;
            grabber = new Capture();
            grabber.QueryFrame();
        }
        else
        {
            grabber.Start();
        }
    };
    Unloaded += (s, e) => { grabber.Stop(); };
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
private void StartScanning()
{
    Application.Idle += new EventHandler(delegate(object senders, EventArgs ee)
    {
        var mat = cap.QueryFrame();
        Bitmap bitmap = mat.Bitmap;
        Image<Gray, byte> img = new Image<Gray, byte>(bitmap);
        MCvAvgComp[][] rectangles = img.DetectHaarCascade(_cascadeClassifier, 1.2, 0,
            HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new System.Drawing.Size(20, 20));
        foreach (var rect in rectangles[0])
        {
            mat.Draw(rect.rect, new Bgr(255, 0, 0), 2);
            img = new Image<Gray, byte>(bitmap);
            string name = FaceRecognition(img);
            MCvFont mCvFont = new MCvFont(FONT.CV_FONT_HERSHEY_PLAIN, 20, 20);
            label1.Text = name;
            //break;
        }
        imageBox1.Image = mat;
    });
}
public void StartStreaming()
{
    grabber = new Capture();
    grabber.QueryFrame();
    Application.Idle += new EventHandler(FrameGrabber);
    //All.Enabled = false;
}
public LogIn()
{
    InitializeComponent();
    //Load haarcascades for face detection
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    //eye = new HaarCascade("haarcascade_eye.xml");
    //Load previously trained faces and labels for each image
    string Labelsinfo = File.ReadAllText(Application.StartupPath + "/Faces/TrainedLabels.txt");
    string[] Labels = Labelsinfo.Split('%');
    NumLabels = Convert.ToInt16(Labels[0]);
    ContTrain = NumLabels;
    string LoadFaces;
    for (int tf = 1; tf < NumLabels + 1; tf++)
    {
        LoadFaces = "face" + tf + ".bmp";
        trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/Faces/" + LoadFaces));
        labels.Add(Labels[tf]);
    }
    grabber = new Emgu.CV.Capture();
    grabber.QueryFrame();
    Application.Idle += new EventHandler(FrameGrabber);
}
/// <summary>
/// Counts the number of frames in a video
/// </summary>
/// <param name="data">Video information</param>
/// <returns>Number of frames</returns>
public Task<int> CountFramesNumberAsync(object data)
{
    if (data == null)
        throw new ArgumentNullException("Null data in LoadFrames");
    IOData ioData = (IOData)data;
    string videoFileName = ioData.FileName;
    if (videoFileName == null || videoFileName.Length == 0)
        throw new ArgumentNullException("Null videoFileName in LoadFrames");
    return Task.Run(() =>
    {
        using (Capture capture = new Capture(videoFileName))
        {
            Image<Bgr, Byte> frame = null;
            int frameNumber = 0;
            do
            {
                frame = capture.QueryFrame();
                if (frame != null)
                    ++frameNumber;
            } while (frame != null);
            return frameNumber;
        }
    });
}
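A note on the example above: decoding every frame just to count them is slow for long files. A minimal sketch of a faster alternative, assuming the container reports a frame count through CAP_PROP.CV_CAP_PROP_FRAME_COUNT (the same property the batch-export example further down reads; some containers only report an estimate, so the decoding loop above remains the authoritative count):

// Sketch: read the reported frame count instead of decoding every frame.
public Task<int> CountFramesFastAsync(string videoFileName)
{
    return Task.Run(() =>
    {
        using (Capture capture = new Capture(videoFileName))
        {
            // May be an estimate for some codecs/containers.
            return (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_COUNT);
        }
    });
}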
public ColorSampleForm(Capture c)
{
    InitializeComponent();
    sampleImg = c.QueryFrame();
    sampleImg = sampleImg.Resize(_frameWidth, _frameHeight, true); //resize while maintaining proportion.
    sampleImageBox.Image = sampleImg;
}
public AIRecognition()
{
    InitializeComponent();
    _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        _vmodel.Pictures.Clear();
        _vmodel.PersonRecognized = 0;
        this.DataContext = _vmodel;
        if (grabber == null)
        {
            CommonData.LoadSavedData();
            //check how many faces we already have
            _countFaces = CommonData.PicturesVM.Pictures.Count;
            grabber = new Capture();
            grabber.QueryFrame();
            grabber.Start();
        }
        else
        {
            grabber.Start();
        }
    };
    Unloaded += (s, e) => { grabber.Stop(); };
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
public Form1()
{
    InitializeComponent();
    grabber = new Emgu.CV.Capture("C:/Users/L33549.CITI/Desktop/a.avi");
    grabber.QueryFrame();
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    //detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 129, 40);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    ellip = new Ellipse();

    contourStorage = new MemStorage();
    approxStorage = new MemStorage();
    hullStorage = new MemStorage();
    defectsStorage = new MemStorage();

    tipPts = new Point[MAX_POINTS];   // coords of the finger tips
    foldPts = new Point[MAX_POINTS];  // coords of the skin folds between fingers
    depths = new float[MAX_POINTS];   // distances from tips to folds
    cogPt = new Point();
    fingerTips = new List<Point>();

    face = new CascadeClassifier("C:/Users/L33549.CITI/Desktop/AbuseAnalysis/HandGestureRecognition/HandGestureRecognition/HandGestureRecognition/haar/Original/haarcascade_hand.xml");
    Application.Idle += new EventHandler(FrameGrabber);

    /*foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.sensor = potentialSensor;
            break;
        }
    }

    if (null != this.sensor)
    {
        // Turn on the color stream to receive color frames
        this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

        // Allocate space to put the pixels we'll receive
        this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength];

        // This is the bitmap we'll display on-screen
        this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

        // Set the image we display to point to the bitmap where we'll put the image data
        //this.Image.Source = this.colorBitmap;

        // Add an event handler to be called whenever there is new color frame data
        this.sensor.ColorFrameReady += this.SensorColorFrameReady;

        // Start the sensor!
        this.sensor.Start();
    }*/
}
private void btn_LoadFileList_Click(object sender, EventArgs e)
{
    button2.Enabled = true;
    FolderBrowserDialog fbd = new FolderBrowserDialog();
    if (fbd.ShowDialog() == DialogResult.OK)
    {
        this.txtBox_fileDirectory.Text = fbd.SelectedPath;
        this.frameNum = 0;
        this.curImageIndx = 0;
        string[] files = Directory.GetFiles(fbd.SelectedPath, "*.flv");
        System.Windows.Forms.MessageBox.Show("Files found: " + files.Length.ToString(), "Message");
        listFiles = new SortedList<string, string>();
        foreach (string file in files)
        {
            listFiles.Add(file, file);
        }
        System.Windows.Forms.MessageBox.Show("First file: " + listFiles.Values[0].ToString(), "Message");
        currentFileName = listFiles.Values[0].ToString();
        if (currentFileName.Length > 4)
        {
            cap = new Emgu.CV.Capture(currentFileName);
            prevFrame = cap.QueryFrame();
            updateFrameNumber();
            textBox2.Text = currentFileName;
            pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
        }
    }
}
public Admin()
{
    InitializeComponent();
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        this.DataContext = CommonData.PicturesVM;
        if (grabber == null)
        {
            CommonData.LoadSavedData();
            //check how many faces we already have
            _countFaces = CommonData.PicturesVM.Pictures.Count;
            grabber = new Capture();
            grabber.QueryFrame();
            grabber.Start();
        }
        else
        {
            grabber.Start();
        }
    };
    Unloaded += (s, e) => { grabber.Stop(); };
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
private void BTn_Detect_Click(object sender, RoutedEventArgs e)
{
    grabberRecognition = new Emgu.CV.Capture(1); //when clicked, the camera will be opened
    //initializing the grabber event
    grabberRecognition.QueryFrame(); //now capture the video
    ComponentDispatcher.ThreadIdle += new EventHandler(FrameGrabberRecognition); //when the application is idle and the camera is on, call the frame grabber event
}
private void button1_Click(object sender, EventArgs e)
{
    //Initialize the capture device
    grabber = new Capture();
    grabber.QueryFrame();
    //Hook up the camera event
    Application.Idle += new EventHandler(FrameGrabber);
    button1.Enabled = false;
}
private void button1_Click(object sender, EventArgs e)
{
    //Initialize the capture device
    webcam = new Emgu.CV.Capture();
    webcam.QueryFrame();
    //Initialize the FrameGrabber event
    Application.Idle += new EventHandler(Framewebcam);
    button1.Enabled = false;
}
private void button1_Click(object sender, EventArgs e)
{
    //Initialize the capture device
    grabber = new Capture();
    grabber.QueryFrame();
    //Initialize the FrameGrabber event
    Application.Idle += new EventHandler(FrameGrabber);
    button1.Enabled = false;
}
private void InitializeEmguCv()
{
    capture = new Capture(CaptureType.Any);
    background = capture
        .QueryFrame()
        .ToGrey()
        .GaussianBlur(new Size(11, 11));
}
private void button1_Click(object sender, EventArgs e)
{
    //Initialize the capture device
    grabber = new Capture();
    grabber.QueryFrame();
    //Initialize the FrameGrabber event
    Application.Idle += new EventHandler(FrameGrabber);
    button1.Enabled = false;
}
private void BTn_CaptureAgain_Click(object sender, RoutedEventArgs e)
{
    imageCaptured = false;
    //faceCount = eyeCount = mouthCount = noseCount = 0;
    grabber = new Emgu.CV.Capture(1); //when clicked, the camera will be opened
    //initializing the grabber event
    grabber.QueryFrame(); //now capture the video
    ComponentDispatcher.ThreadIdle += new EventHandler(FrameGrabber); //when the application is idle and the camera is on, call the frame grabber event
}
public void createCaptures()
{
    devices = new List<Capture>();
    for (int i = 0; i < 2; i++)
    {
        Capture c = new Capture(i);
        if (c.QueryFrame() != null)
            devices.Add(c);
        else
            c.Dispose(); // release devices that did not deliver a frame
    }
}
public void testCam()
{
    ImageViewer viewer = new ImageViewer(); //create an image viewer
    Capture capture = new Capture();        //create a camera capture
    Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
    {
        //run this until application closed (close button click on image viewer)
        viewer.Image = capture.QueryFrame(); //draw the image obtained from camera
    });
    viewer.ShowDialog(); //show the image viewer
}
static void Main(string[] args)
{
    LoCoMoCo MyBot = new LoCoMoCo("COM3");             // com port number
    var MainToken = new CancellationTokenSource();     // create token for the cancel
    UdpClient MainServerSocket = new UdpClient(15000); // declare a client
    byte[] MainDataReceived = new byte[1024];          // prepare container for received data
    string MainStringData = "";
    Capture capture = new Capture();                   // declare object for camera
    Image<Bgr, Byte> frame;                            // declare image for capture
    int TotalMessageCount = 0;
    while (true) // keep the main server "listening"
    {
        try
        {
            frame = capture.QueryFrame();
            Console.WriteLine("Waiting for a UDP client...");
            IPEndPoint MainClient = new IPEndPoint(IPAddress.Any, 0);                                // prepare a client
            MainDataReceived = MainServerSocket.Receive(ref MainClient);                             // receive packet
            MainStringData = Encoding.ASCII.GetString(MainDataReceived, 0, MainDataReceived.Length); // get string from packet
            Console.WriteLine("Response from " + MainClient.Address);
            Console.WriteLine("Message " + TotalMessageCount++ + ": " + MainStringData + "\n");      // display client's string
            if (MainStringData.Equals("Picture"))
            {
                MainToken = new CancellationTokenSource(); // new cancellation token every iteration
                Task.Run(() => SendPicture(MainServerSocket, MainClient, frame), MainToken.Token);   // start method on another thread
            }
            if (MainStringData.Equals("StopV"))
                MainToken.Cancel();
            else if (MainStringData.Equals("Forward"))
                MyBot.forward();
            else if (MainStringData.Equals("Backward"))
                MyBot.backward();
            else if (MainStringData.Equals("Left"))
                MyBot.turnleft();
            else if (MainStringData.Equals("Right"))
                MyBot.turnright();
            else if (MainStringData.Equals("Stop"))
                MyBot.stop();
        }
        catch (Exception)
        {
            // swallow errors so the server keeps listening
        }
    }
}
public static void TestCodeBook()
{
    int learningFrames = 40;
    using (Capture capture = new Capture("tree.avi"))
    using (BGCodeBookModel<Ycc> bgmodel = new BGCodeBookModel<Ycc>())
    {
        #region Set color thresholds values
        MCvBGCodeBookModel param = bgmodel.MCvBGCodeBookModel;
        param.modMin[0] = param.modMin[1] = param.modMin[2] = 3;
        param.modMax[0] = param.modMax[1] = param.modMax[2] = 10;
        param.cbBounds[0] = param.cbBounds[1] = param.cbBounds[2] = 10;
        bgmodel.MCvBGCodeBookModel = param;
        #endregion

        ImageViewer viewer = new ImageViewer();
        int count = 0;
        EventHandler processFrame = delegate(Object sender, EventArgs e)
        {
            Image<Bgr, Byte> img = capture.QueryFrame();
            if (img == null)
            {
                return;
            }
            Image<Gray, byte> mask = new Image<Gray, Byte>(img.Size);
            mask.SetValue(255);
            viewer.Text = String.Format("Processing {0}th image. {1}", count++, learningFrames > 0 ? "(Learning)" : String.Empty);
            using (Image<Ycc, Byte> ycc = img.Convert<Ycc, Byte>()) //using YCC color space for BGCodeBook
            {
                bgmodel.Update(ycc, ycc.ROI, mask);
                if (learningFrames == 0) //training is completed
                    bgmodel.ClearStale(bgmodel.MCvBGCodeBookModel.t / 2, ycc.ROI, mask);
                learningFrames--;
                Image<Gray, Byte> m = bgmodel.ForgroundMask.Clone();
                if (count == 56)
                {
                    m = bgmodel.ForgroundMask.Clone();
                }
                //m._EqualizeHist();
                viewer.Image = m;
                //viewer.Image = img;
                System.Threading.Thread.Sleep(100);
            }
            img.Dispose();
        };
        Application.Idle += processFrame;
        viewer.ShowDialog();
    }
}
static void Main(string[] args)
{
    /* One File
    string desktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
    string workingDir = @"E:\REDID\vids"; //Path.Combine(desktop, "test", "Dancing_Resized");
    string videoName = "pos_030.avi";
    */
    string workingDir = @"C:\Users\Paolo\Desktop\crowd_results\New folder";
    string outDir = @"C:\Users\Paolo\Desktop\crowd_results";
    int maxFrames = 1000;

    string[] files = Directory.GetFiles(workingDir, "*.avi");
    foreach (string f in files)
    {
        string filename = Path.GetFileName(f);
        string outDirFile = Path.Combine(outDir, filename.Remove(filename.Length - 4));
        if (!Directory.Exists(outDirFile))
            Directory.CreateDirectory(outDirFile);
        else
        {
            Console.WriteLine("Directory already exists! OVERWRITE?");
            if (Console.ReadKey().Key == ConsoleKey.Y)
            {
                Directory.Delete(outDirFile, true);
                CvInvoke.cvWaitKey(100);
            }
            else
                Environment.Exit(1);
        }

        Capture cap = new Capture(f);
        string rootFile = Path.GetFileName(f);
        rootFile = rootFile.Remove(rootFile.Length - 4);
        int totFrames = (int)cap.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_COUNT);
        if (totFrames > maxFrames && maxFrames != 0)
            totFrames = maxFrames;
        Console.WriteLine("FILE {0}: {1} frames", rootFile, totFrames);

        StreamWriter sw1 = new StreamWriter(Path.Combine(outDirFile, "imageList.txt"));
        for (int frameNumber = 1; frameNumber < totFrames; frameNumber++)
        {
            IMG currentFrame = cap.QueryFrame(); // IMG: the project's alias for the Emgu frame image type
            if (currentFrame == null)
                continue;
            string fileOut = string.Format("{0}_{1}.jpg", rootFile, frameNumber.ToString("D8"));
            currentFrame.Save(Path.Combine(outDirFile, fileOut));
            sw1.Write(fileOut + '\n');
            if (frameNumber % 100 == 0)
                Console.WriteLine("Frame {0} written...", frameNumber);
        }
        sw1.Dispose();
    }
}
private void Window_Loaded(object sender, RoutedEventArgs e)
{
    Capture capture = new Capture(7);
    capture.Start();
    ComponentDispatcher.ThreadIdle += (o, arg) =>
    {
        var img = capture.QueryFrame();
        Emgu.CV.Contour<Bgr> con = new Contour<Bgr>(new MemStorage());
        Display.Source = BitmapSourceConvert.ToBitmapSource(img);
    };
}
private void button2_Click(object sender, EventArgs e)
{
    Capture c = new Capture(openFileDialog1.FileName);
    Image<Bgr, Byte> frame = null;
    do
    {
        frame = c.QueryFrame();
        if (frame == null)
            break;
        q.Enqueue(frame.Clone());
    } while (true);
}
/// <summary>
/// Captures a frame from camera
/// </summary>
private static void CaptureFrame()
{
    try
    {
        if (CaptureSettings.Instance().MOTION_DETECTION)
        {
            Image<Bgr, byte> originalFrame = lastFrame.Clone();
            lastFrame = camera.QueryFrame();
            CalculateDiff(originalFrame, lastFrame);
        }
        else
        {
            lastFrame = camera.QueryFrame();
        }
        lastFrameTime = DateTime.Now;
    }
    catch
    {
        Console.WriteLine("Unable to capture camera image");
    }
}
private void ShowCameraScreen(object sender, EventArgs e)
{
    var imageFrame = _capture.QueryFrame().ToImage<Bgr, byte>();
    DrawThankYouText(imageFrame, "Thank you!");
    if (!_doBadge && !_doFace && !_doTracking)
    {
        DrawAttractiveText(imageFrame, "xConnect\r\n in Action");
    }
    DrawBestFace(imageFrame);
    DrawBestBadge(imageFrame);
    DrawFrame(imageFrame);
    if (needRefresh)
    {
        needRefresh = false;
        Start();
    }
}
private void BTn_Go_Click(object sender, RoutedEventArgs e)
{
    LBl_WarningMsg.Content = "";
    if (TBx_Matriculation.Text != "")
    {
        int i;
        if (int.TryParse(TBx_Matriculation.Text, out i))
        {
            bool hasMat, studExists;
            string uniStudents = File.ReadAllText(AppDomain.CurrentDomain.BaseDirectory + "/TrainedFaces/UniStudents.txt");
            string[] arrayUniStudents = uniStudents.Split('%');
            hasMat = Array.IndexOf(arrayUniStudents, TBx_Matriculation.Text) >= 0;
            if (hasMat)
            {
                string addedStudents = File.ReadAllText(AppDomain.CurrentDomain.BaseDirectory + "/TrainedFaces/TrainedNames.txt");
                string[] arrayaddedStudents = addedStudents.Split('%');
                studExists = Array.IndexOf(arrayaddedStudents, TBx_Matriculation.Text) >= 0;
                LBl_WarningMsg.Content = "";
                if (!studExists)
                {
                    try
                    {
                        TBx_Nose.Visibility = TBx_Eyes.Visibility = TBx_Face.Visibility = TBx_Mouth.Visibility = BTn_AddStudent.Visibility = BTn_CaptureAgain.Visibility = Visibility.Visible;
                        grabber = new Emgu.CV.Capture(1); //when clicked, the camera will be opened
                        grabber.QueryFrame();             //now capture the video
                        ComponentDispatcher.ThreadIdle += new EventHandler(FrameGrabber); //when the application is idle and the camera is on, call the frame grabber event
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show(ex.Message);
                    }
                }
                else
                {
                    LBl_WarningMsg.Content = "Student already added in the Attendance System";
                }
            }
            else
            {
                LBl_WarningMsg.Content = "Student is not enrolled in the university";
            }
        }
        else
        {
            LBl_WarningMsg.Content = "Characters other than numbers are not allowed!";
        }
    }
    else
    {
        LBl_WarningMsg.Content = "Matriculation number cannot be empty!";
    }
}
static void Run(string file)
{
    //load image
    Mat image; // = new Mat(file, LoadImageType.Color);
    long detectionTime;
    //declare rectangles for detection
    List<Rectangle> breast = new List<Rectangle>();
    //disable cuda module
    bool tryUseCuda = false;
    bool tryUseOpenCL = true;
    int itr = 0;
    //capture video file
    Capture capture = new Capture(file);
    image = capture.QueryFrame();
    while (image != null)
    {
        if (itr == 161)
            break;
        Console.WriteLine(itr++);
        image = capture.QueryFrame();
        if (image == null) //guard against running detection on a null frame at the end of the video
            break;
        detect.detect1(image, "cascade.xml", breast, tryUseCuda, tryUseOpenCL, out detectionTime);
        //put rectangles
        foreach (Rectangle b in breast)
            CvInvoke.Rectangle(image, b, new Bgr(Color.Red).MCvScalar, 2);
    }
    capture.Dispose();
    //show image
    /*ImageViewer.Show(image, String.Format(
        "Completed face and eye detection using {0} in {1} milliseconds",
        (tryUseCuda && CudaInvoke.HasCuda) ? "GPU" :
        (tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice) ? "OpenCL" : "CPU",
        detectionTime));*/
}
/// <summary>
/// Retrieve the current frame.
/// </summary>
/// <returns></returns>
public Image<Bgr, Byte> Frame()
{
    lock (this)
    {
        if (this.IsConnected && _device != null)
        {
            return _device.QueryFrame();
        }
        else
        {
            return null;
        }
    }
}
private static double AvrLum(Capture webcam)
{
    Image<Bgr, Byte> testFrame = webcam.QueryFrame();
    Image<Bgr, Byte> cloneFrame = testFrame.Clone();
    long sum = 0; // long: a byte-sum over a large frame can overflow int
    // Data enumerates every byte of every channel, hence the width * height * 3 divisor
    foreach (int lum in cloneFrame.Data)
    {
        sum += lum;
    }
    double avr = (double)sum / (cloneFrame.Height * cloneFrame.Width * 3);
    return avr;
}
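The manual byte loop above can also be avoided; a minimal sketch using Emgu's built-in per-channel mean, assuming Image<TColor, TDepth>.GetAverage() is available in the Emgu version at hand and that the plain mean of the three channel averages matches the byte-wise average intended above:

private static double AvrLumViaGetAverage(Capture webcam)
{
    using (Image<Bgr, Byte> frame = webcam.QueryFrame())
    {
        Bgr mean = frame.GetAverage(); // per-channel mean computed by Emgu CV
        return (mean.Blue + mean.Green + mean.Red) / 3.0;
    }
}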
public override void OnStart()
{
    PackageHost.WriteInfo("Package starting - IsRunning: {0} - IsConnected: {1}", PackageHost.IsRunning, PackageHost.IsConnected);
    string startupPath = System.Reflection.Assembly.GetExecutingAssembly().CodeBase;
    IDBAccess dataStore = new DBAccess("facesDB.db");
    recoEngine = new RecognizerEngine(
        Path.Combine(Environment.CurrentDirectory, "data/facesDB.db"),
        Path.Combine(Environment.CurrentDirectory, "data/RecognizerEngineData.YAML"));
    //"/data/facesDB.db", startupPath + "/data/RecognizerEngineData.YAML");
    cap = new Capture();
    //cap.QueryFrame().Save(Path.Combine(Environment.CurrentDirectory, "test.bmp"));
    Task.Factory.StartNew(() =>
    {
        while (PackageHost.IsRunning)
        {
            Rectangle[] faces;
            //string bla = System.Reflection.Assembly.GetExecutingAssembly().CodeBase + "/haarcascade_frontalface_default.xml";
            cascadeClassifier = new CascadeClassifier(
                Path.Combine(Environment.CurrentDirectory, "haarcascade_frontalface_default.xml"));
            using (var imageFrame = cap.QueryFrame().ToImage<Bgr, Byte>())
            {
                if (imageFrame != null)
                {
                    var grayframe = imageFrame.Convert<Gray, byte>();
                    faces = cascadeClassifier.DetectMultiScale(grayframe, 1.2, 10, Size.Empty); //the actual face detection happens here
                    PackageHost.PushStateObject<Rectangle[]>("faces", faces);
                    foreach (var face in faces)
                    {
                        int nameID = recoEngine.RecognizeUser(imageFrame.GetSubRect(face).Convert<Gray, byte>());
                        if (nameID == 0)
                        {
                            PackageHost.WriteWarn("unknown face");
                            PackageHost.PushStateObject<String>("Face", "Unknown");
                        }
                        else
                        {
                            string name = dataStore.GetUsername(nameID);
                            PackageHost.WriteInfo("face recognized : {0}", name);
                            PackageHost.PushStateObject<String>("Face", name);
                        }
                    }
                }
            }
            Thread.Sleep(5000); //PackageHost.GetSettingValue<int>("RefreshRate")
        }
    });
}
private static void Main(string[] args)
{
    var capture = new Capture(0);
    Native.Create(@"D:\GitHub\videotools\Debug\model\main_clm_general.txt");
    var id = 0;
    while (capture.Grab())
    {
        var mat = capture.QueryFrame();
        Native.SessionPoint[] points;
        var result = Native.Process(mat, out points);
        Console.WriteLine(@"{0:D5}:{1}", id, Native.ToString(result));
        ++id;
    }
    Native.Destroy();
}
public GeometryModel3D Create3DImage()
{
    cap = new Capture(0);
    IImage frame = cap.QueryFrame();
    wallPaper = new SubclassWallpaperCreater(frame);
    wallPaper.designWallpaper();
    IImage pic = wallPaper.WallpaperImage;
    image3D = new Image3D();
    image3D.BuildSolid(pic, ref mGeometry);
    cap = null;
    return mGeometry;
}
void showCaptureImage()
{
    lock (lock1)
    {
        while (true)
        {
            Mat a = capture.QueryFrame();
            //pictureBox.Image = a.Bitmap;
            pictureBox.Image = a.Bitmap;
            currentImage = new Image<Bgr, byte>(a.Bitmap);
            Thread.Sleep(5);
        }
    }
}
private void FrameGrabber(object sender, EventArgs e)
{
    Users.Add(""); //Recheck at this point
    currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
    gray = currentFrame.Convert<Gray, Byte>();
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
        face, 1.2, 10,
        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
        new Size(20, 20));
    foreach (MCvAvgComp f in facesDetected[0])
    {
        result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);
        if (trainingImages.ToArray().Length != 0)
        {
            //TermCriteria for face recognition with numbers of trained images like maxIteration
            MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);
            //Eigen face recognizer
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                trainingImages.ToArray(),
                labels.ToArray(),
                3000,
                ref termCrit);
            name = recognizer.Recognize(result);
            //Validation = name;
            //Draw the label for each face detected and recognized
            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.BlueViolet));
        }
        Users.Add("");
    }
    cameraBox.Image = currentFrame;
    if (!string.IsNullOrWhiteSpace(name) && name == "Lac")
    {
        Application.Idle -= new EventHandler(FrameGrabber);
        grabber.Dispose();
        RibbonForm1 form2 = new RibbonForm1();
        this.Hide();
        form2.ShowDialog();
        this.Close();
    }
    names = "";
    Users.Clear();
}
private void button1_Click(object sender, EventArgs e)
{
    try
    {
        //Initialize the capture device
        grabber = new Capture();
        grabber.QueryFrame();
        //Initialize the FrameGrabber event
        Application.Idle += FrameGrabber;
        button1.Enabled = false;
    }
    catch (NullReferenceException)
    {
        MessageBox.Show(@"The camera is not turned on or is not attached to the computer");
    }
}
public void startLoad()
{
    Capture c = new Capture(path);
    Image<Bgr, Byte> frame = null;
    do
    {
        frame = c.QueryFrame();
        if (frame == null)
            break;
        Bitmap bmp = frame.ToBitmap();
        byte[] payload = FrameHelper.getFrameBytes(bmp);
        RTPModel pkg = new RTPModel(0, frames.Count, frames.Count, payload);
        frames.Add(pkg.toBytes());
        FrameLoaded.Invoke(this, frames.Count - 1);
    } while (true);
}
public Form1()
{
    InitializeComponent();
    grabber = new Emgu.CV.Capture(@".\..\..\..\M2U00253.MPG");
    grabber.QueryFrame();
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    ellip = new Ellipse();
    Application.Idle += new EventHandler(FrameGrabber);
}
private Mat GetNextFile()
{
    curImageIndx++;
    b_listFinished = true;
    if (curImageIndx < listFiles.Count && curImageIndx >= 0)
    {
        b_listFinished = false;
        currentFileName = listFiles.Values[curImageIndx];
        if (currentFileName.Length > 4)
        {
            if (cap != null)
            {
                cap.Dispose();
                cap = null;
                GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced);
                GC.WaitForFullGCComplete();
            }
            cap = new Emgu.CV.Capture(currentFileName);
            //System.Threading.Thread.Sleep(1000);
            //while(true)
            try
            {
                nextFrame = cap.QueryFrame();
                //break;
            }
            catch (CvException cf)
            {
                // The file is busy (e.g. the mouse hovers over it in Explorer); reopen the capture
                cap.Dispose();
                cap = null;
                GC.Collect();
                //Emgu.CV.Capture.CaptureModuleType.
                cap = new Emgu.CV.Capture(currentFileName);
                string str = cf.ErrorMessage;
            }
            //updateFrameNumber();
            textBox2.Text = currentFileName;
            if (!b_AutoRun)
            {
                pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
            }
        }
    }
    return nextFrame;
}
public Form1()
{
    InitializeComponent();
    markedPoints = new Dictionary<string, Point[]>();
    try
    {
        capture = new Capture("kinect_local_rgb_raw_synced.avi");
        capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames, currentFrame);
        pictureBox.Image = capture.QueryFrame().Bitmap;
        frameCount = (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameCount);
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}
public Form1()
{
    InitializeComponent();
    grabber = new Emgu.CV.Capture();
    grabber.QueryFrame();
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 131, 80);
    YCrCb_max = new Ycc(255, 185, 135);
    box = new MCvBox2D();
    // ellip = new Ellipse();
    _face = new HaarCascade("haarcascade_frontalface_alt_tree.xml");
    eyes = new HaarCascade("haarcascade_mcs_eyepair_big.xml");
    reye = new HaarCascade("haarcascade_mcs_lefteye.xml");
    leye = new HaarCascade("haarcascade_mcs_righteye.xml");
    label1.Hide();
}
private void button1_Click(object sender, EventArgs e)
{
    button2.Enabled = true;
    OpenFileDialog openPic = new OpenFileDialog();
    //openPic.Filter = ".avi";
    if (openPic.ShowDialog() == DialogResult.OK)
    {
        currentFileName = openPic.FileName;
        this.frameNum = 0;
        if (currentFileName.Length > 4)
        {
            cap = new Emgu.CV.Capture(currentFileName);
            prevFrame = cap.QueryFrame();
            updateFrameNumber();
            textBox2.Text = currentFileName;
            pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
        }
    }
}
private void getFrame(object sender, EventArgs arg)
{
    try
    {
        m = capture.QueryFrame();
        displayImage(m.Bitmap);
        if (!detectWorker.IsBusy)
        {
            if (!detectWorker.CancellationPending)
            {
                detectWorker.RunWorkerAsync();
            }
        }
    }
    catch (NullReferenceException e)
    {
        Application.Idle -= getFrame;
        capture.Stop();
        MessageBox.Show(e.Message);
    }
}
void Web()
{
    SqlConnection con;
    SqlCommand cmd;
    string id = getID();
    //Get the current frame from capture device
    // currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
    DsDevice[] _SystemCamereas = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    if (_SystemCamereas.Length > 0)
    {
        // Query a few frames so the camera has time to adjust before the shot is kept
        grabber.QueryFrame();
        grabber.QueryFrame();
        currentFrame = grabber.QueryFrame(); //draw the image obtained from camera
        imageBoxFrameGrabber.Image = currentFrame;
        imageBoxFrameGrabber.Image.Save("WebCam.jpg");
        Image img = Image.FromFile("WebCam.jpg");
        MemoryStream tmpStream = new MemoryStream();
        img.Save(tmpStream, ImageFormat.Jpeg);
        byte[] imgBytes = new byte[1304140];
        tmpStream.Read(imgBytes, 0, 1304140); // note: the stream position is at the end after Save, so this buffer stays empty; the bytes actually uploaded are read from the file below
        byte[] image = null;
        string filepath = "WebCam.jpg";
        FileStream fs = new FileStream(filepath, FileMode.Open, FileAccess.Read);
        BinaryReader br = new BinaryReader(fs);
        image = br.ReadBytes((int)fs.Length);
        string sql = " Update table1 Set Web = @Imgg where Id='" + id + "'";
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        if (con.State != ConnectionState.Open)
        {
            con.Open();
        }
        cmd = new SqlCommand(sql, con);
        cmd.Parameters.Add(new SqlParameter("@Imgg", image));
        int x = cmd.ExecuteNonQuery();
        con.Close();
    }
}
//Procedure that matches faces from the database against the camera
private void FrameProcedure(object sender, EventArgs e)
{
    //guard everything, just in case
    try
    {
        //add a new face that has no name (yet)
        Users.Add("");
        //create the Frame object at the camera box resolution (855x588)
        Frame = camera.QueryFrame().Resize(cameraBox.Width, cameraBox.Height, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        //convert the image to grayscale
        grayFace = Frame.Convert<Gray, Byte>();
        MCvAvgComp[][] facesDetectedNow = grayFace.DetectHaarCascade(faceDetected, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(20, 20));
        //for EVERY face detected in the frame do the following
        foreach (MCvAvgComp f in facesDetectedNow[0])
        {
            //resize the "face" and make it grayscale
            result = Frame.Copy(f.rect).Convert<Gray, Byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            //outline the face in green with a line width of 3
            Frame.Draw(f.rect, new Bgr(Color.Green), 3);
            //if there are faces in the database, then...
            if (trainingImames.ToArray().Length != 0)
            {
                MCvTermCriteria termCriterias = new MCvTermCriteria(Count, 0.001);
                //recognize the face from the database (create the object that recognizes the face)
                EigenObjectRecognizer recognizer = new EigenObjectRecognizer(trainingImames.ToArray(), labels.ToArray(), 1500, ref termCriterias);
                //output the name
                name = recognizer.Recognize(result);
                //label the face relative to the coordinates of the green outline obtained above
                Frame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.Red));
            }
            //if nothing was found, print an empty string
            Users.Add("");
        }
        //show the recognition result in the labels
        if (name == "")
        {
            metroLabel3.Text = "The face is not in the face database";
            metroLabel5.Text = "or";
            metroLabel6.Text = "No face was found";
        }
        else
        {
            metroLabel3.Text = name;
            metroLabel5.Text = "";
            metroLabel6.Text = "";
        }
        //check whether the camera is connected; show an error otherwise
        try
        {
            cameraBox.Image = Frame;
            name = "";
            Users.Clear();
        }
        catch (Exception)
        {
            //show a message and close the application if all is lost
            var camfail = MessageBox.Show("It looks like no camera was detected. Are you sure the camera is connected and working reliably?", "Camera not detected", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
            if (camfail == DialogResult.Yes)
            {
                cameraBox.Image = Frame;
                name = "";
                Users.Clear();
            }
            if (camfail == DialogResult.No)
            {
                MessageBox.Show("Try restarting the computer. If the error persists, report it to: [email protected]", "Thank you", MessageBoxButtons.OK, MessageBoxIcon.None);
                Close();
                Environment.Exit(0);
                Application.Exit();
            }
        }
    }
    catch (Exception)
    {
        //show a message and close the application if all is lost
        var camfail = MessageBox.Show("It looks like no camera was detected. Are you sure the camera is connected and working reliably?", "Camera not detected", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
        if (camfail == DialogResult.Yes)
        {
        }
        if (camfail == DialogResult.No)
        {
            MessageBox.Show("Try restarting the computer. If the error persists, report it to: [email protected]", "Thank you", MessageBoxButtons.OK, MessageBoxIcon.None);
            Close();
            Environment.Exit(0);
            Application.Exit();
        }
    }
}
private void GetCameraXY(System.Windows.Forms.PictureBox picturebox1, System.Windows.Forms.PictureBox picturebox2)
{
    Image<Bgr, Byte> frame = capture.QueryFrame();
    //Image<Bgr, Byte> frame = new Image<Bgr, Byte>("Capture.jpg");
    if (frame != null)
    {
        Image<Gray, Byte> gray = frame.Convert<Gray, Byte>();
        double cannyThreshold = 180.0;
        double cannyThresholdLinking = 120.0;
        Image<Gray, Byte> cannyEdges = gray.Canny(cannyThreshold, cannyThresholdLinking);
        List<Triangle2DF> triangleList = new List<Triangle2DF>();
        List<MCvBox2D> boxList = new List<MCvBox2D>(); //a box is a rotated rectangle
        using (MemStorage storage = new MemStorage()) //allocate storage for contour approximation
            for (Contour<Point> contours = cannyEdges.FindContours(
                     Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                     Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST,
                     storage);
                 contours != null;
                 contours = contours.HNext)
            {
                Contour<Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.05, storage);
                if (currentContour.Area > 400 && currentContour.Area < 20000) //only consider contours with an area between 400 and 20000
                {
                    if (currentContour.Total == 4) //The contour has 4 vertices.
                    {
                        // determine if all the angles in the contour are within [80, 100] degree
                        bool isRectangle = true;
                        Point[] pts = currentContour.ToArray();
                        LineSegment2D[] edges = PointCollection.PolyLine(pts, true);
                        for (int i = 0; i < edges.Length; i++)
                        {
                            double angle = Math.Abs(edges[(i + 1) % edges.Length].GetExteriorAngleDegree(edges[i]));
                            if (angle < 80 || angle > 100)
                            {
                                isRectangle = false;
                                break;
                            }
                        }
                        if (isRectangle)
                        {
                            boxList.Add(currentContour.GetMinAreaRect());
                        }
                    }
                }
            }

        Image<Bgr, Byte> triangleRectangleImage = frame.CopyBlank();
        foreach (Triangle2DF triangle in triangleList)
        {
            triangleRectangleImage.Draw(triangle, new Bgr(Color.DarkBlue), 2);
        }
        foreach (MCvBox2D box in boxList)
        {
            frm.SetText(frm.Controls["textBoxImageY"], box.center.Y.ToString());
            frm.SetText(frm.Controls["textBoxDeg"], box.angle.ToString());
            frm.SetText(frm.Controls["textBoxImageX"], box.center.X.ToString());
            CameraHasData = true;
            triangleRectangleImage.Draw(box, new Bgr(Color.DarkOrange), 2);
        }

        // add cross hairs to image
        int totalwidth = frame.Width;
        int totalheight = frame.Height;
        PointF[] linepointshor = new PointF[] { new PointF(0, totalheight / 2), new PointF(totalwidth, totalheight / 2) };
        PointF[] linepointsver = new PointF[] { new PointF(totalwidth / 2, 0), new PointF(totalwidth / 2, totalheight) };
        triangleRectangleImage.DrawPolyline(Array.ConvertAll<PointF, Point>(linepointshor, Point.Round), false, new Bgr(Color.AntiqueWhite), 1);
        triangleRectangleImage.DrawPolyline(Array.ConvertAll<PointF, Point>(linepointsver, Point.Round), false, new Bgr(Color.AntiqueWhite), 1);
        picturebox2.Image = triangleRectangleImage.ToBitmap();
        frame.DrawPolyline(Array.ConvertAll<PointF, Point>(linepointshor, Point.Round), false, new Bgr(Color.AntiqueWhite), 1);
        frame.DrawPolyline(Array.ConvertAll<PointF, Point>(linepointsver, Point.Round), false, new Bgr(Color.AntiqueWhite), 1);
        picturebox1.Image = frame.ToBitmap();
    }
}
void Framewebcam(object sender, EventArgs e)
{
    //label3.Text = "0";
    //label4.Text = "";
    NamePersons.Add("");

    //Get the current frame from the capture device
    currentFrame = webcam.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

    //Convert it to grayscale
    gray = currentFrame.Convert<Gray, Byte>();

    //Face detector
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
        face, 1.2, 10,
        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
        new Size(20, 20));

    //Action for each element detected
    foreach (MCvAvgComp f in facesDetected[0])
    {
        t = t + 1;
        result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        //draw the face detected in the 0th (gray) channel with blue color
        currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

        if (trainingImages.ToArray().Length != 0)
        {
            //TermCriteria for face recognition with numbers of trained images like maxIteration
            MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

            //Eigen face recognizer
#pragma warning disable CS0436 // Type conflicts with imported type
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
#pragma warning restore CS0436 // Type conflicts with imported type
                trainingImages.ToArray(),
                labels.ToArray(),
                3000,
                ref termCrit);

            name = recognizer.Recognize(result);

            //Draw the label for each face detected and recognized
            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
        }

        NamePersons[t - 1] = name;
        NamePersons.Add("");

        //Set the number of faces detected on the scene
        label3.Text = dataGridView1.Rows.Count.ToString(); //facesDetected[0].Length.ToString();

        /*
        //Set the region of interest on the faces
        gray.ROI = f.rect;
        MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(
            eye, 1.1, 10,
            Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
            new Size(20, 20));
        gray.ROI = Rectangle.Empty;

        foreach (MCvAvgComp ey in eyesDetected[0])
        {
            Rectangle eyeRect = ey.rect;
            eyeRect.Offset(f.rect.X, f.rect.Y);
            currentFrame.Draw(eyeRect, new Bgr(Color.Blue), 2);
        }
        */
    }
    t = 0;

    //Concatenate the names of the persons recognized
    for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
    {
        names = names + NamePersons[nnn];
    }

    //Show the faces processed and recognized
    imageBoxFramewebcam.Image = currentFrame;
    label4.Text = names;
    names = "";
    //Clear the list(vector) of names
    NamePersons.Clear();

    if (label4.Text != names)
    {
        OleDbCommand cmd = new OleDbCommand();
        cmd.CommandText = "INSERT INTO [Table1] ([sinfo],[time]) VALUES (@info, @time)";
        cmd.Connection = connection;
        connection.Open();
        if (connection.State == System.Data.ConnectionState.Open)
        {
            cmd.Parameters.Add("@info", OleDbType.VarChar).Value = label4.Text;
            cmd.Parameters.Add("@time", OleDbType.VarChar).Value = DateTime.Now.ToShortTimeString();
            try
            {
                cmd.ExecuteNonQuery();
                dataGridView1.Rows.Add();
                dataGridView1.Rows[dataGridView1.Rows.Count - 1].Cells[0].Value = dataGridView1.Rows.Count - 1;
                dataGridView1.Rows[dataGridView1.Rows.Count - 1].Cells[1].Value = label4.Text;
                dataGridView1.Rows[dataGridView1.Rows.Count - 1].Cells[2].Value = DateTime.Now.ToShortTimeString();
                connection.Close();
            }
            catch (OleDbException)
            {
                connection.Close();
            }
        }
        else
        {
            MessageBox.Show("Connection Failed");
        }

        /*
        connection.Open();
        OleDbCommand comd = new OleDbCommand();
        comd.CommandText = "SELECT * FROM Table1";
        comd.Connection = connection;
        connection.Open();
        cmd.ExecuteNonQuery();

        dataGridView1.Rows.Add();
        dataGridView1.Rows[dataGridView1.Rows.Count - 1].Cells[0].Value = dataGridView1.Rows.Count - 1;
        dataGridView1.Rows[dataGridView1.Rows.Count - 1].Cells[1].Value = label4.Text;
        dataGridView1.Rows[dataGridView1.Rows.Count - 1].Cells[2].Value = DateTime.Now.ToShortTimeString();
        */
    }
}
void timer_Tick(object sender, EventArgs e)
{
    string id = getID(), text = GetMACAddress();
    SqlConnection con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
    SqlCommand cmd;
    SqlDataReader dr;
    cmd = new SqlCommand("Select Control from table1 where Id='" + id + "'", con);
    con.Open();
    dr = cmd.ExecuteReader();
    dr.Read();
    // Each branch acknowledges the remote command by resetting Control to '1', then acts on it
    if (Convert.ToString(dr[0]) == "2")
    {
        con.Close();
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
        fullScreen();
    }
    else if (Convert.ToString(dr[0]) == "5")
    {
        con.Close();
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
        logoff();
    }
    else if (Convert.ToString(dr[0]) == "4")
    {
        con.Close();
        this.WindowState = FormWindowState.Minimized;
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
    }
    else if (Convert.ToString(dr[0]) == "3")
    {
        con.Close();
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
        screenCapture(true);
        this.Close();
        Process.Start("PCBC.exe");
    }
    else if (Convert.ToString(dr[0]) == "6")
    {
        con.Close();
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
        Process.Start("shutdown", "/s /t 0");
    }
    else if (Convert.ToString(dr[0]) == "7")
    {
        con.Close();
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
        grabber = new Emgu.CV.Capture();
        grabber.QueryFrame();
        Web();
        this.Close();
        Process.Start("PCBC.exe");
    }
    else if (Convert.ToString(dr[0]) == "8")
    {
        con.Close();
        con = new SqlConnection("workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC");
        cmd = new SqlCommand("Update table1 Set Control='1' where ID='" + id + "'", con);
        con.Open();
        cmd.ExecuteNonQuery();
        con.Close();
        localFetchImages();
        Process.Start("PCBC.exe");
    }
}
private void Timer畫面運作_Tick(object sender, EventArgs e)
{
    Image<Bgr, Byte> 框架 = cap.QueryFrame(); // 框架 = "frame"
    pictureBox1.Image = 框架.ToBitmap();
}
void FrameGrabber(object sender, EventArgs e)
{
    label3.Text = "0";
    //label4.Text = "";
    NamePersons.Add("");

    //Get the current frame from the capture device
    currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

    //Convert it to grayscale
    gray = currentFrame.Convert<Gray, Byte>();

    //Face detector
    MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
        face, 1.2, 10,
        Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
        new Size(20, 20));

    //Action for each element detected
    foreach (MCvAvgComp f in facesDetected[0])
    {
        t = t + 1;
        result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
        //draw the face detected in the 0th (gray) channel with blue color
        currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);

        if (trainingImages.ToArray().Length != 0)
        {
            //TermCriteria for face recognition with numbers of trained images like maxIteration
            MCvTermCriteria termCrit = new MCvTermCriteria(ContTrain, 0.001);

            //Eigen face recognizer
            EigenObjectRecognizer recognizer = new EigenObjectRecognizer(
                trainingImages.ToArray(),
                labels.ToArray(),
                3000,
                ref termCrit);

            name = recognizer.Recognize(result);

            //Draw the label for each face detected and recognized
            currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));
        }

        NamePersons[t - 1] = name;
        NamePersons.Add("");

        //Set the number of faces detected on the scene
        label3.Text = facesDetected[0].Length.ToString();
    }
    t = 0;

    //Concatenate the names of the persons recognized
    for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
    {
        names = names + NamePersons[nnn] + ", ";
    }

    //Show the faces processed and recognized
    imageBoxFrameGrabber.Image = currentFrame;
    label4.Text = names;
    names = "";
    //Clear the list(vector) of names
    NamePersons.Clear();
}
//"Start the algorithm" button
private void start_Click(object sender, EventArgs e)
{
    //hide and show the relevant controls
    start.Visible = false;
    metroButton1.Visible = true;
    label1.Visible = true;
    textName.Visible = true;
    metroComboBox1.Visible = true;
    metroCheckBox1.Visible = true;
    metroLabel1.Visible = true;
    metroLabel2.Visible = true;
    metroTrackBar1.Visible = true;
    metroLabel4.Visible = true;
    metroLabel3.Visible = true;
    metroLabel5.Visible = true;
    metroLabel6.Visible = true;
    metroLabel7.Visible = false;
    metroButton2.Visible = true;
    metroButton5.Visible = true;
    metroButton6.Visible = true;

    //check whether a camera exists, connect to it, show it, and start the frame procedure
    try
    {
        camera = new Emgu.CV.Capture();
        camera.QueryFrame();
        Application.Idle += new EventHandler(FrameProcedure);
    }
    catch (Exception)
    {
        //show a message and close the application if all is lost
        var camfail = MessageBox.Show("It looks like no camera was detected. Are you sure the camera is connected and working reliably?", "Camera not detected", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
        if (camfail == DialogResult.Yes)
        {
            try
            {
                camera = new Emgu.CV.Capture();
                camera.QueryFrame();
                Application.Idle += new EventHandler(FrameProcedure);
            }
            catch { }
        }
        if (camfail == DialogResult.No)
        {
            MessageBox.Show("Try restarting the computer. If the error persists, report it to: [email protected]", "Thank you", MessageBoxButtons.OK, MessageBoxIcon.None);
            DirectoryInfo di = new DirectoryInfo(Application.StartupPath + "/Faces/");
            try
            {
                foreach (FileInfo file in di.EnumerateFiles())
                {
                    try { file.Delete(); } catch (Exception) { }
                }
                foreach (DirectoryInfo dir in di.EnumerateDirectories())
                {
                    try { dir.Delete(true); } catch (Exception) { }
                }
                //reset all state
                di.Delete();
                Count = 0;
                trainingImames.Clear();
                labels.Clear();
                name = null;
                metroComboBox1.Items.Clear();
                Environment.Exit(0);
                Application.Exit();
            }
            catch
            {
                //clean up again and close the application
                Count = 0;
                trainingImames.Clear();
                labels.Clear();
                name = null;
                metroComboBox1.Items.Clear();
                Close();
                Environment.Exit(0);
                Application.Exit();
            }
            Close();
            Environment.Exit(0);
            Application.Exit();
        }
    }
}
private bool FindNextFrame()
{
    bool b_NeedStop = false;
    if (currentFileName.Length > 4 && cap != null)
    {
        using (Mat image = cap.QueryFrame())
        {
            SetPrevFrame();
            Mat nextFrame = image;
            if (image == null)
            {
                nextFrame = GetNextFile();
            }
            updateFrameNumber();
            bool stop = false;
            while (nextFrame != null && !stop)
            {
                SetPrevFrame();
                SkipNFrames();
                if (nextFrame != null && testDiff())
                {
                    if (!b_AutoRun)
                    {
                        pictureBox1.Image = nextFrame.Bitmap.Clone() as Bitmap;
                        pictureBox1.Refresh();
                    }
                    stop = true;
                }
                if (b_listFinished)
                {
                    break;
                }
            }
            if (nextFrame == null && b_listFinished)
            {
                button2.Enabled = false;
                MessageBox.Show("Processing finished");
                //pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
                b_NeedStop = true;
            }
            else
            {
                SetPrevFrame();
                if (!b_AutoRun)
                {
                    pictureBox1.Image = nextFrame.Bitmap.Clone() as Bitmap;
                }
            }
            if (!b_AutoRun)
            {
                pictureBox1.Refresh();
            }
        }
    }
    return b_NeedStop;
}
private void timImage_Tick(object sender, EventArgs e)
{
    Emgu.CV.Image<Bgr, Byte> photo = imgCapture.QueryFrame();
    picProfile.Image = photo.ToBitmap();
}
public void FrameGrabber(object sender, EventArgs e) //Frame grabber event
{
    try
    {
        if (imageCaptured == false)
        {
            //initialize current frame from the grabber and resize it with cubic interpolation
            currentFrame = grabber.QueryFrame().Resize(501, 407, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

            // Convert the image frame to gray scale (image processing)
            gray = currentFrame.Convert<Gray, Byte>();

            // Detect face, eyes, mouth, and nose using the loaded Haar classifiers,
            // with canny pruning applied and a minimum window size for each cascade.
            // The video is processed as image frames for detection.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(face, 1.2, 1, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new System.Drawing.Size(20, 20));
            MCvAvgComp[][] eyesDetected = gray.DetectHaarCascade(eyes, 1.2, 5, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new System.Drawing.Size(20, 20));
            MCvAvgComp[][] mouthDetected = gray.DetectHaarCascade(mouth, 1.2, 10, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new System.Drawing.Size(50, 50));
            MCvAvgComp[][] noseDetected = gray.DetectHaarCascade(nose, 1.2, 5, Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new System.Drawing.Size(20, 20));

            // For each detected feature: increment its counter, copy it into its
            // result image, and draw a colored rectangle around it
            foreach (MCvAvgComp f in facesDetected[0])
            {
                faceCount = faceCount + 1;
                resultFace = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(f.rect, new Bgr(System.Drawing.Color.Red), 2);
            }
            foreach (MCvAvgComp f in eyesDetected[0])
            {
                eyeCount = eyeCount + 1;
                resultEyes = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(f.rect, new Bgr(System.Drawing.Color.White), 2);
            }
            foreach (MCvAvgComp f in mouthDetected[0])
            {
                mouthCount = mouthCount + 1;
                resultMouth = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(f.rect, new Bgr(System.Drawing.Color.Green), 2);
            }
            foreach (MCvAvgComp f in noseDetected[0])
            {
                noseCount = noseCount + 1;
                resultNose = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                currentFrame.Draw(f.rect, new Bgr(System.Drawing.Color.Blue), 2);
            }

            //View the current frame captured from the camera in the imported ImageBox
            imgBox.Image = currentFrame;

            if (faceCount > 0 & eyeCount > 0 & mouthCount > 0 & noseCount > 0)
            {
                ExtractFeatures();
            }
        }
    }
    catch (Exception)
    {
        MessageBox.Show("Press Ok to Continue");
    }
}