/// <summary>
/// Spawns a background thread that grabs gray camera frames and streams
/// each frame as five UDP datagrams while <c>_isRunning</c> stays set.
/// The first byte of every datagram is the chunk index (0..4) so the
/// receiver can reassemble the frame.
/// </summary>
private static void StartClient()
{
    th_cli = new Thread (delegate()
    {
        try
        {
            ConsoleAdditives.WriteHeader("Stream started");
            Capture cap = new Capture();
            while(_isRunning)
            {
                // One gray frame as a raw byte buffer.
                byte[] buf = cap.QueryGrayFrame().Bytes;
                // Split into 5 chunks. Integer division: up to 4 trailing
                // bytes of the frame are silently dropped each iteration.
                int buflp = buf.Length/5;
                for(byte i=0;i<5;i++)
                {
                    byte[] tbuf = new byte[buflp];
                    // Chunk header: index of this slice within the frame.
                    tbuf[0]=i;
                    // NOTE(review): j starts at 1 while the source offset is
                    // i*buflp+j, so buf[i*buflp] (the first payload byte of
                    // every chunk) is never transmitted — looks like an
                    // off-by-one; confirm against the receiver's reassembly.
                    for(int j=1;j<buflp;j++)
                    {
                        tbuf[j]=buf[i*buflp+j];
                    }
                    client.Send(tbuf,buflp,remoteEP);
                }
            }
            ConsoleAdditives.WriteHeader("Stream stoped"); // sic — runtime string left unchanged
        }
        catch(Exception ex)
        {
            Console.WriteLine(ex.ToString());
        }
    });
    th_cli.Start ();
}
/// <summary>
/// Form load: opens the camera, wires a per-frame handler that detects
/// faces, shows the cropped face thumbnail, and (when the recognizer is
/// trained) draws the recognized name over the live preview.
/// </summary>
private void YokalmaSistemi_Load(object sender, EventArgs e)
{
    Capture capture1 = new Capture();
    // FIX: the null test originally ran *after* capture1.Start(), so a
    // failed construction would have thrown before the message was shown.
    if (capture1 == null)
    {
        MessageBox.Show("Kamera Açılamadı");
        return;
    }

    // Hoisted out of the frame handler: the cascade and font were being
    // re-allocated on every grabbed frame.
    HaarCascade haaryuz = new HaarCascade("haarcascade_frontalface_default.xml");
    MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 0.5, 0.5);

    capture1.ImageGrabbed += (a, b) =>
    {
        var image = capture1.RetrieveBgrFrame();
        var grayimage1 = image.Convert<Gray, byte>();
        MCvAvgComp[][] Yuzler = grayimage1.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
        foreach (MCvAvgComp yuz in Yuzler[0])
        {
            // Normalized 100x100 crop used both for display and recognition.
            var sadeyuz = grayimage1.Copy(yuz.rect).Convert<Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
            pic_kucuk_res.Image = sadeyuz.ToBitmap();
            if (train.IsTrained)
            {
                name = train.Recognise(sadeyuz);
                int match_value = (int)train.Get_Eigen_Distance;
                image.Draw(name + " ", ref font, new Point(yuz.rect.X - 2, yuz.rect.Y - 2), new Bgr(Color.SteelBlue));
            }
            image.Draw(yuz.rect, new Bgr(Color.Purple), 2);
        }
        pic_kamera.Image = image.ToBitmap();
    };
    // Start only after the handler is attached so no frames are missed.
    capture1.Start();
}
/// <summary>
/// Toggles frame capture: lazily creates the capture device on first use,
/// then flips between processing frames on idle and pausing.
/// </summary>
private void captureButton_Click(object sender, EventArgs e)
{
    // Create the shared capture device on first click.
    if (_capture == null)
    {
        try
        {
            _capture = new Capture();
        }
        catch (NullReferenceException excpt)
        {
            MessageBox.Show(excpt.Message);
        }
    }

    if (_capture == null)
        return; // device unavailable — nothing to toggle

    if (_captureInProgress)
    {
        // Stop: detach the per-idle frame processor.
        Application.Idle -= new EventHandler(ProcessFrame);
        captureButton.Text = "Start Capture";
    }
    else
    {
        // Start: process a frame whenever the UI thread is idle.
        captureButton.Text = "Stop";
        Application.Idle += new EventHandler(ProcessFrame);
    }
    _captureInProgress = !_captureInProgress;
}
/// <summary>
/// Main form entry point: wires up the UI, opens camera 0, loads stored
/// image data and begins the scanning loop.
/// </summary>
public Form1()
{
    InitializeComponent();
    // Camera index 0 = first attached capture device.
    cap = new Emgu.CV.Capture(0);
    LoadImageData();
    StartScanning();
}
/// <summary>
/// Main application window. Shows either the live camera preview
/// (pictureBox1) or the image slideshow (pictureBox2) depending on
/// <paramref name="cameraTurn"/>, then switches to full screen.
/// </summary>
public App(Boolean cameraTurn)
{
    InitializeComponent();

    // Slideshow frames preloaded by the slider component.
    images = slider.Images();
    pictureBox2.Image = images[0];

    if (!cameraTurn)
    {
        cameraState = false;
        pictureBox1.Hide(); // slideshow mode: hide the camera preview
    }
    else
    {
        pictureBox2.Hide(); // camera mode: hide the slideshow
    }

    // High-frequency (1 ms) tick driving the preview refresh.
    timer = new DispatcherTimer();
    timer.Tick += new EventHandler(timer1_Tick);
    timer.Interval = new TimeSpan(0, 0, 0, 0, 1);
    timer.Start();

    // Secondary once-per-second timer.
    timer2.Interval = 1000;
    timer2.Start();

    // The picture box is stretched over the whole screen...
    pictureBox1.Dock = DockStyle.Fill;
    // ...and then its content is stretched across the full picture box.
    fullScreen.EnterFullScreen(this);

    capture = new Capture();
    haarCascade = new HaarCascade(@"haarcascade_frontalface_alt_tree.xml");
}
/// <summary>
/// Lets the user pick a folder, collects its *.flv files into a sorted
/// list and opens the first one, showing its first frame in the preview.
/// </summary>
private void btn_LoadFileList_Click(object sender, EventArgs e)
{
    button2.Enabled = true;
    FolderBrowserDialog fbd = new FolderBrowserDialog();
    if (fbd.ShowDialog() == DialogResult.OK)
    {
        this.txtBox_fileDirectory.Text = fbd.SelectedPath;
        this.frameNum = 0;
        this.curImageIndx = 0;
        string[] files = Directory.GetFiles(fbd.SelectedPath, "*.flv");
        System.Windows.Forms.MessageBox.Show("Files found: " + files.Length.ToString(), "Message");

        // FIX: the original indexed listFiles.Values[0] unconditionally and
        // threw ArgumentOutOfRangeException when the folder had no *.flv files.
        if (files.Length == 0)
            return;

        // Sorted by full path; key == value keeps the original structure.
        listFiles = new SortedList<string, string>();
        foreach (string file in files)
        {
            listFiles.Add(file, file);
        }
        System.Windows.Forms.MessageBox.Show("First file: " + listFiles.Values[0].ToString(), "Message");

        currentFileName = listFiles.Values[0].ToString();
        if (currentFileName.Length > 4)
        {
            // Open the first clip and display its first frame.
            cap = new Emgu.CV.Capture(currentFileName);
            prevFrame = cap.QueryFrame();
            updateFrameNumber();
            textBox2.Text = currentFileName;
            pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
        }
    }
}
/// <summary>
/// Reads a video, runs face/eye detection on every frame, accumulates a
/// "smile time" counter while eyes are detected, overlays it, and writes
/// the annotated frames to out3.avi.
/// </summary>
private void run()
{
    Image<Bgr, Byte> image = new Image<Bgr, byte>("lena.jpg"); //Read the files as an 8-bit Bgr image
    TimeSpan time = TimeSpan.Zero;
    MCvFont font = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_COMPLEX, 1.0, 1.0);

    // FIX: the capture was never disposed (leaked the decoder handle) and
    // the frame-skip counter 'x' was unused; both cleaned up here.
    using (Capture vid = new Capture("kw.avi"))
    using (VideoWriter vw = new VideoWriter("out3.avi", 15, 640, 480, true))
    {
        vid.FlipVertical = true;
        while (vid.Grab())
        {
            image = vid.RetrieveBgrFrame();
            long detectionTime;
            List<Rectangle> faces = new List<Rectangle>();
            List<Rectangle> eyes = new List<Rectangle>();
            DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "supersmile.xml", faces, eyes, out detectionTime);

            foreach (Rectangle face in faces)
                image.Draw(face, new Bgr(Color.Red), 2);
            foreach (Rectangle eye in eyes)
                image.Draw(eye, new Bgr(Color.Blue), 2);

            // Each frame with a detection adds ~66 ms (one 15 fps frame).
            if (eyes.Count > 0)
                time = time.Add(new TimeSpan(0, 0, 0, 0, 66));

            // Overlay the accumulated seconds:milliseconds counter.
            image.Draw(String.Format("{0}:{1}", time.Seconds, time.Milliseconds), ref font, new Point(50, 50), new Bgr(0, 0, 255));
            setimage(image);
            vw.WriteFrame<Bgr, Byte>(image);
        }
    }
}
/// <summary>
/// Form load: prepares the intrinsic camera parameters, fills the line
/// colour array with random colours, then opens the default camera and
/// starts grabbing frames.
/// </summary>
public Form1()
{
    try
    {
        IC = new IntrinsicCameraParameters();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
    }
    InitializeComponent();

    // Fill line colour array with random BGR values.
    // FIX: Random.Next's upper bound is exclusive, so Next(0, 255) could
    // never yield a full-intensity 255 channel; use 256.
    Random R = new Random();
    for (int i = 0; i < line_colour_array.Length; i++)
    {
        line_colour_array[i] = new Bgr(R.Next(0, 256), R.Next(0, 256), R.Next(0, 256));
    }

    // Set up capture as normal.
    try
    {
        _Capture = new Capture();
        _Capture.ImageGrabbed += new Emgu.CV.Capture.GrabEventHandler(_Capture_ImageGrabbed);
        _Capture.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Error: " + ex.Message);
    }
}
/// <summary>
/// Wires together the webcam, detectors and cursor/speech processors used
/// by the gesture-driven cursor pipeline, and hooks the cursor loop onto
/// the application idle event.
/// </summary>
public ImageProcessor(FormMain mainForm, LoginService loginService, GesturesService gesturesService, TrainingBox trainBox, CursorSimulator cursorSimulator)
{
    try
    {
        this.cursorSimulator = cursorSimulator;
        this.webCam = new Capture(); // initialize the camera
        this.mainForm = mainForm;
        this.loginService = loginService;
        this.gesturesService = gesturesService;
        this.trainBox = trainBox;
        this.clickTimer = new Stopwatch();
        this.mouthTimer = new Stopwatch();
        this.speechProcessor = new SpeechProcessor();
        this.eyeDetector = new EyeDetector();
        this.faceDetector = new FaceDetector(this.eyeDetector);
        this.cursorLoopProcessor = new CursorLoopProcessor(this.cursorSimulator);
        this.cursorActionProcessor = new CursorActionProcessor();
        this.deactivateActionProcessor = new DeactivateActionProcessor(this.speechProcessor, this.cursorLoopProcessor);
        this.activateSpeechAction = new ActivateSpeechAction();
        Application.Idle += new EventHandler(this.cursorLoopProcessor.Pool);
    }
    catch (Exception)
    {
        // FIX: was 'throw ex;', which reset the stack trace; a bare
        // 'throw' rethrows preserving the original trace.
        throw;
    }
}
/// <summary>
/// New-certificate form: pulls company data from the main form and opens
/// up to two cameras. The primary camera is required (failure is shown to
/// the user); the secondary camera at index 1 is optional.
/// </summary>
public frmNewCertificate(frmMain mainFrm)
{
    InitializeComponent();
    this.mainFrm = mainFrm;
    this.companyData = mainFrm.GetCompanyData();
    comboBox1.Text = "0";

    // Primary camera: report failure to the user.
    try
    {
        _capture = new Capture();
        _capture.ImageGrabbed += ProcessFrame;
    }
    catch (NullReferenceException excpt)
    {
        MessageBox.Show(excpt.Message);
    }

    // Secondary camera is best-effort: many machines have only one device.
    // FIX: dropped the unused exception variable (compiler warning) and
    // made the deliberate swallow explicit.
    try
    {
        _capture1 = new Capture(1);
        _capture1.ImageGrabbed += ProcessFrame1;
    }
    catch (NullReferenceException)
    {
        // Intentionally ignored: second camera is optional.
    }
}
/// <summary>
/// Measurement form: prepares the two result rows (FWHM and 1/e^2 beam
/// widths), selects the second tab, disables OpenCL, and opens the default
/// camera with per-frame processing.
/// </summary>
public Form1()
{
    InitializeComponent();

    // One grid row per beam-width metric.
    string[] rowHeaders = { "FWHM", "1/e^2" };
    foreach (string header in rowHeaders)
    {
        int rowIndex = dataGridView1.Rows.Add();
        dataGridView1.Rows[rowIndex].HeaderCell.Value = header;
    }
    dataGridView1.RowHeadersWidth = 78;

    tabControl1.SelectedIndex = 1;
    CvInvoke.UseOpenCL = false;

    // Open the default camera and hook frame processing.
    try
    {
        _capture = new Capture();
        _capture.ImageGrabbed += ProcessFrame;
    }
    catch (NullReferenceException ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Login form: loads the face-detection cascade and the previously trained
/// face images with their labels, then starts grabbing camera frames on
/// application idle.
/// </summary>
public LogIn()
{
    InitializeComponent();

    // Face-detection cascade (the eye cascade is intentionally not loaded).
    face = new HaarCascade("haarcascade_frontalface_default.xml");

    // TrainedLabels.txt layout: count % label1 % label2 % ...
    string labelData = File.ReadAllText(Application.StartupPath + "/Faces/TrainedLabels.txt");
    string[] parts = labelData.Split('%');
    NumLabels = Convert.ToInt16(parts[0]);
    ContTrain = NumLabels;

    // Images are stored as face1.bmp .. faceN.bmp next to their labels.
    for (int idx = 1; idx < NumLabels + 1; idx++)
    {
        string fileName = "face" + idx + ".bmp";
        trainingImages.Add(new Image<Gray, byte>(Application.StartupPath + "/Faces/" + fileName));
        labels.Add(parts[idx]);
    }

    grabber = new Emgu.CV.Capture();
    grabber.QueryFrame(); // prime the device
    Application.Idle += new EventHandler(FrameGrabber);
}
/// <summary>
/// Hand-gesture-recognition form: opens a hard-coded test video, sets up
/// HSV/YCrCb skin-colour thresholds, contour scratch storage and the hand
/// cascade, then processes frames on application idle.
/// </summary>
public Form1()
{
    InitializeComponent();
    // NOTE(review): absolute developer-machine paths below — parameterize
    // before running anywhere else.
    grabber = new Emgu.CV.Capture("C:/Users/L33549.CITI/Desktop/a.avi");
    grabber.QueryFrame(); // prime the decoder so Width/Height are valid
    frameWidth = grabber.Width;
    frameHeight = grabber.Height;
    //detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);

    // Skin-colour thresholds in HSV and YCrCb space.
    hsv_min = new Hsv(0, 45, 0);
    hsv_max = new Hsv(20, 255, 255);
    YCrCb_min = new Ycc(0, 129, 40);
    YCrCb_max = new Ycc(255, 185, 135);

    box = new MCvBox2D();
    ellip = new Ellipse();

    // Scratch storage for contour / convex-hull / defect extraction.
    contourStorage = new MemStorage();
    approxStorage = new MemStorage();
    hullStorage = new MemStorage();
    defectsStorage = new MemStorage();

    tipPts = new Point[MAX_POINTS]; // coords of the finger tips
    foldPts = new Point[MAX_POINTS]; // coords of the skin folds between fingers
    depths = new float[MAX_POINTS]; // distances from tips to folds
    cogPt = new Point();
    fingerTips = new List<Point>();
    face = new CascadeClassifier("C:/Users/L33549.CITI/Desktop/AbuseAnalysis/HandGestureRecognition/HandGestureRecognition/HandGestureRecognition/haar/Original/haarcascade_hand.xml");
    Application.Idle += new EventHandler(FrameGrabber);

    // Disabled Kinect colour-stream bootstrap, kept for reference:
    /*foreach (var potentialSensor in KinectSensor.KinectSensors) { if (potentialSensor.Status == KinectStatus.Connected) { this.sensor = potentialSensor; break; } } if (null != this.sensor) { // Turn on the color stream to receive color frames this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30); // Allocate space to put the pixels we'll receive this.colorPixels = new byte[this.sensor.ColorStream.FramePixelDataLength]; // This is the bitmap we'll display on-screen this.colorBitmap = new WriteableBitmap(this.sensor.ColorStream.FrameWidth, this.sensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null); // Set the image we display to point to the bitmap where we'll put the image data //this.Image.Source = this.colorBitmap; // Add an event handler to be called whenever there is new color frame data this.sensor.ColorFrameReady += this.SensorColorFrameReady; // Start the sensor! this.sensor.Start(); }*/
}
/// <summary>
/// Opens the capture device at <paramref name="cameraIndex"/>, records it
/// as the active device, and fills the resolutions list with every
/// distinct supported resolution (largest first), selecting the one the
/// device is currently using.
/// </summary>
private void CameraInit(int cameraIndex)
{
    var camera = _cameras[cameraIndex];
    _capture = new Capture(cameraIndex);
    _cameraDevice = cameraIndex;

    // Active capture resolution formatted as "WxH".
    var width = _capture.GetCaptureProperty(CapProp.FrameWidth);
    var height = _capture.GetCaptureProperty(CapProp.FrameHeight);
    var activeResolution = width + "x" + height;

    // Distinct supported resolutions, largest first.
    var supported = VideoDeviceManager.GetAllAvailableResolution(camera)
        .OrderByDescending(r => r.Width)
        .ThenByDescending(r => r.Height)
        .ToList();
    var labels = supported.Select(r => r.Width + "x" + r.Height).Distinct().ToList();

    resolutionsList.Items.Clear();
    var selected = -1;
    for (var i = 0; i < labels.Count; i++)
    {
        resolutionsList.Items.Add(labels[i]);
        if (labels[i].Equals(activeResolution))
        {
            selected = i;
        }
    }
    resolutionsList.SelectedIndex = selected;
}
/// <summary>
/// View load: opens the default camera, toggles per-idle frame processing,
/// and prepares a fresh image source and backing stream for the preview.
/// </summary>
private void CamCapture_Loaded(object sender, RoutedEventArgs e)
{
    camera = new Capture(); // default device

    if (camera != null)
    {
        // Toggle frame processing on the WPF thread-idle hook.
        if (captureInProgress)
        {
            ComponentDispatcher.ThreadIdle -= ProcessFrame;
        }
        else
        {
            ComponentDispatcher.ThreadIdle += ProcessFrame;
        }
        captureInProgress = !captureInProgress;
    }

    // Fresh image source and memory stream backing the preview control.
    ImageSrc = new BitmapImage();
    ImageBox.Source = ImageSrc;
    stream = new MemoryStream();
}
/// <summary>
/// Image-retrieval form: opens the default camera and renders frames
/// whenever the UI thread is idle.
/// </summary>
/// <param name="formMD">Owning manage-data form, kept for callbacks.</param>
public FormImageRetrieve(FormManageData formMD)
{
    _formMD = formMD;
    capture = new Capture(); // default (first) camera
    InitializeComponent();
    // Render a frame each time the message loop goes idle.
    Application.Idle += new EventHandler(runningFrame);
}
/// <summary>
/// Tray application: opens the camera, builds the notify icon with an Exit
/// menu, and polls for shoulder-surfing faces every 500 ms, skipping ticks
/// that would overlap a scan still in progress.
/// </summary>
public Program()
{
    // Reentrancy flag for the timer callback.
    // FIX: the original passed a boxed bool as the timer *state* and
    // assigned to the 'state' parameter inside the callback — parameter
    // assignment never persists between invocations, so the guard was a
    // no-op. Capturing a local in the delegate's closure actually works.
    bool isProcessing = false;

    this._stopwatch = new System.Diagnostics.Stopwatch();
    this._capture = new Capture();

    // Tray icon with a single "Exit" context-menu entry.
    this.components = new System.ComponentModel.Container();
    this.contextMenu = new System.Windows.Forms.ContextMenu();
    this.menuItem = new System.Windows.Forms.MenuItem();
    this.contextMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] { this.menuItem });
    this.menuItem.Index = 0;
    this.menuItem.Text = "E&xit";
    this.menuItem.Click += new System.EventHandler(
        delegate(object sender, EventArgs e) { Application.Exit(); });

    this.notifyIcon = new System.Windows.Forms.NotifyIcon(this.components);
    notifyIcon.Icon = new Icon("info.ico");
    notifyIcon.ContextMenu = this.contextMenu;
    notifyIcon.Text = "Shoulder Surfer Alert";
    notifyIcon.Visible = true;

    // Scan every 500 ms, starting immediately; skip overlapping ticks.
    this._runner = new System.Threading.Timer(new System.Threading.TimerCallback(
        delegate(object state)
        {
            if (!isProcessing)
            {
                isProcessing = true;
                try
                {
                    this.ProcessFaces();
                }
                finally
                {
                    isProcessing = false;
                }
            }
        }), null, 0, 500);
}
/// <summary>
/// Capture form: registers the five master-image display boxes, opens the
/// default camera and starts grabbing, reporting status in the status bar.
/// </summary>
public RileeCapture()
{
    InitializeComponent();
    btnDrawMasterImage.Enabled = false;

    // Recognition boxes: five fixed slots for master images.
    _limgMasters.Add(imbMaster1);
    _limgMasters.Add(imbMaster2);
    _limgMasters.Add(imbMaster3);
    _limgMasters.Add(imbMaster4);
    _limgMasters.Add(imbMaster5);

    try
    {
        _capture = new Capture();
        _capture.ImageGrabbed += ProcessFrame;
        _capture.Start();
        tslStatus.Text = "Capture started";
        //flip horizontal to natural
        //if ((_capture != null)&&(!_capture.FlipHorizontal)) _capture.FlipHorizontal = true;
    }
    catch (NullReferenceException ex)
    {
        tslStatus.Text = "Capture initialization failed...";
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Form load: opens camera 0, loads the frontal-face cascade and starts
/// the ~25 fps polling timer.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    capturar = new Capture(0); // first attached camera
    haarCascade1 = new HaarCascade("haarcascade_frontalface_default.xml");

    // 40 ms interval ≈ 25 frames per second.
    timer1.Interval = 40;
    timer1.Enabled = true;
}
/// <summary>
/// On-prem recognition view. On first load it creates the face cascade,
/// counts the trained faces and opens the camera; on subsequent loads it
/// just resumes grabbing. Grabbing stops when the view unloads.
/// </summary>
public RecognitionOnPrem()
{
    InitializeComponent();
    Loaded += (s, e) =>
    {
        if (grabber == null)
        {
            _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
            // Count of already-trained faces.
            ContTrain = CommonData.TrainingImages.Count;
            grabber = new Capture();
            grabber.QueryFrame(); // prime the device
        }
        else
        {
            grabber.Start(); // re-entering the view: just resume
        }
    };
    Unloaded += (s, e) => { grabber.Stop(); };
    // Per-render-frame processing hook.
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
/// <summary>
/// Main form: opens the default camera, starts the UI timer and runs the
/// load logic immediately.
/// </summary>
public Form1()
{
    InitializeComponent();
    cap = new Capture(); // default camera
    timer1.Start();
    // NOTE(review): Form1_Load is invoked directly here rather than via the
    // Load event — confirm it is not also wired to the event in the
    // designer, or it will run twice.
    Form1_Load();
}
/// <summary>
/// Motion-detection form: opens the default camera once, builds the
/// motion-history buffer and processes frames on application idle.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Create the capture device, reporting any initialization error.
    if (_capture == null)
    {
        try
        {
            _capture = new Capture();
        }
        catch (NullReferenceException ex)
        {
            MessageBox.Show(ex.Message);
        }
    }

    if (_capture == null)
        return; // no camera: leave the form inert

    // Motion history: 6 buffered images (tune to the camera's frame rate),
    // pixel-change threshold 20/255, keep 1 s of history, and 0.05 s/0.5 s
    // cvCalcMotionGradient parameters.
    _motionHistory = new MotionHistory(6, 20, 1.0, 0.05, 0.5);
    Application.Idle += new EventHandler(ProcessFrame);
}
/// <summary>
/// Main form: connects to the first camera.
/// </summary>
public Form1()
{
    InitializeComponent();
    cap = new Capture(); // connect to the first camera
    // equivalently: cap = new Capture(0);            -- identical to the above
    // or:           cap = new Capture("miVideo.avi"); -- loads a video file instead
}
/// <summary>
/// Student-registration form: opens the camera and, per grabbed frame,
/// detects faces, outlines them in red and shows the 100x100 cropped face
/// in the thumbnail box.
/// </summary>
public OgrKayit()
{
    InitializeComponent();
    updateComboBoxClass();

    Capture capture = new Capture();
    // FIX: the null test originally ran *after* capture.Start(); test first.
    if (capture == null)
    {
        MessageBox.Show("Kamera Açılamadı");
        return;
    }

    // Hoisted out of the frame handler: the cascade was re-created on every
    // grabbed frame. (The unused MCvFont local was dropped.)
    HaarCascade haaryuz = new HaarCascade("haarcascade_frontalface_default.xml");

    capture.ImageGrabbed += (a, b) =>
    {
        var image = capture.RetrieveBgrFrame();
        var grayimage = image.Convert<Gray, byte>();
        MCvAvgComp[][] Yuzler = grayimage.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
        foreach (MCvAvgComp yuz in Yuzler[0])
        {
            // Normalized 100x100 crop shown as the capture thumbnail.
            var sadeyuz = grayimage.Copy(yuz.rect).Convert<Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
            image.Draw(yuz.rect, new Bgr(Color.Red), 2);
            pic_kucuk_res.Image = sadeyuz.ToBitmap();
        }
        pic_box_kamera.Image = image.ToBitmap();
    };
    // Start only after the handler is attached so no frames are missed.
    capture.Start();
}
/// <summary>
/// Toggles barcode decoding: lazily creates the capture device (with a
/// helpful message when EmguCV's native binaries are missing), then flips
/// between decoding on idle and pausing.
/// </summary>
private void captureButton_Click(object sender, EventArgs e)
{
    if (capture == null)
    {
        try
        {
            capture = new Capture();
        }
        catch (NullReferenceException exception)
        {
            MessageBox.Show(exception.Message);
        }
        catch (TypeInitializationException exc)
        {
            // EmguCV's native libraries were not found next to the executable.
            MessageBox.Show(
                "Attention: You have to copy all the assemblies and native libraries from an official release of EmguCV to the directory of the demo." + Environment.NewLine + Environment.NewLine + exc);
        }
    }

    if (capture == null)
        return;

    // Flip between decoding frames on idle and doing nothing.
    if (Capturing)
    {
        captureButton.Text = "Start Capturing";
        Application.Idle -= DoDecoding;
    }
    else
    {
        captureButton.Text = "Stop Capturing";
        Application.Idle += DoDecoding;
    }
    Capturing = !Capturing;
}
/// <summary>
/// Wraps an EmguCV camera capture as an XNA texture source.
/// </summary>
/// <param name="device">Graphics device the frame texture is created on.</param>
/// <param name="camIndex">Index of the capture device to open.</param>
public VideoEmgu(GraphicsDevice device, int camIndex)
{
    this.device = device;
    capture = new Capture(camIndex);
    // Texture and per-pixel colour buffer sized to the camera's native frame.
    frame = new Texture2D(device, capture.Width, capture.Height);
    colorData = new Color[capture.Width * capture.Height];
}
/// <summary>
/// Motion-detection form: creates the camera capture once, builds the
/// motion-history tracker and hooks frame processing onto application idle.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Create the camera capture, reporting any initialization error.
    if (_capture == null)
    {
        try
        {
            _capture = new Capture();
        }
        catch (NullReferenceException ex)
        {
            MessageBox.Show(ex.Message);
        }
    }

    if (_capture != null)
    {
        // Keep 1 s of motion history; 0.05 s / 0.5 s are the
        // cvCalcMotionGradient timing parameters.
        _motionHistory = new MotionHistory(1.0, 0.05, 0.5);
        Application.Idle += ProcessFrame;
    }
}
/// <summary>
/// Builds the display-profile bins and the navigation transform, then
/// replays the bundled video at its native frame rate, processing one
/// frame per timer tick until a key is read from the console.
/// </summary>
static void Main(string[] args)
{
    // World navigation: translate, then scale into display-profile space.
    navigationMatrix = new Matrix3D();
    navigationMatrix.Translate(new Vector3D(0, 100, 110));
    navigationMatrix.Scale(new Vector3D((double)1 / 5, (double)1 / 5, (double)1 / 5));

    // One bin per quantized L*a*b* cell.
    displayProfile = new Bin[Bin.RANGEL, Bin.RANGEA, Bin.RANGEB];
    for (int l = 0; l < Bin.RANGEL; l++)
        for (int a = 0; a < Bin.RANGEA; a++)
            for (int b = 0; b < Bin.RANGEB; b++)
                displayProfile[l, a, b] = new Bin(l, a, b);
    PopulateProfile(displayProfile, navigationMatrix);

    String path = Environment.CurrentDirectory + PATH_TO_VIDEO;
    if (!System.IO.File.Exists(path))
        return;

    // Opens the movie file.
    capture = new Capture(path);
    double fps = capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FPS);
    // FIX: some containers/codecs report a non-positive FPS, which made the
    // Timer interval (1000 / fps) invalid; fall back to 25 fps.
    if (fps <= 0)
        fps = 25;

    // Reads frame by frame at the video's frame rate.
    Timer timer = new Timer(1000 / fps);
    timer.Elapsed += new ElapsedEventHandler(timer_Elapsed);
    timer.Start();

    Console.Read(); // keep the process alive while frames arrive
}
/// <summary>
/// AI-recognition view: resets the view model on every load; on first load
/// it also loads the saved training data, counts the stored faces and
/// opens the camera. Grabbing stops when the view unloads.
/// </summary>
public AIRecognition()
{
    InitializeComponent();
    _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");
    Loaded += (s, e) =>
    {
        // Fresh view-model state on every navigation into this view.
        _vmodel.Pictures.Clear();
        _vmodel.PersonRecognized = 0;
        this.DataContext = _vmodel;
        if (grabber == null)
        {
            CommonData.LoadSavedData();
            // How many faces we already have on disk.
            _countFaces = CommonData.PicturesVM.Pictures.Count;
            grabber = new Capture();
            grabber.QueryFrame(); // prime the device
            grabber.Start();
        }
        else
        {
            grabber.Start(); // re-entering the view: just resume
        }
    };
    Unloaded += (s, e) => { grabber.Stop(); };
    // Per-render-frame processing hook.
    CompositionTarget.Rendering += CompositionTarget_Rendering;
}
/// <summary>
/// Toggles the camera: lazily creates the capture device, then uses the
/// button caption as the on/off state to attach or detach the per-idle
/// frame handler.
/// </summary>
private void btnKamera_Click(object sender, EventArgs e)
{
    if (tangkap == null)
    {
        try
        {
            tangkap = new Capture();
        }
        catch (NullReferenceException ex)
        {
            MessageBox.Show(ex.Message);
        }
    }

    if (tangkap == null)
        return;

    // The button label doubles as the running/stopped flag.
    bool running = btnKamera.Text == "Berhenti Kamera";
    if (running)
    {
        btnKamera.Text = "Mulai Kamera";
        Application.Idle -= ProsesMenangkapFrame;
    }
    else
    {
        btnKamera.Text = "Berhenti Kamera";
        Application.Idle += ProsesMenangkapFrame;
    }
}
/// <summary>
/// Starts the camera preview: lazily creates the capture device, then
/// grabs frames on a dedicated worker thread (not the UI idle loop).
/// </summary>
private void startPreview()
{
    // Create the capture object if it is not already created.
    if (_capture == null)
    {
        try
        {
            _capture = new Capture();
        }
        catch (NullReferenceException ex)
        {
            MessageBox.Show(ex.Message);
        }
    }

    if (_capture == null)
        return;

    // Start the capture process on a background thread; captureThread
    // paints into the preview window.
    captureEnabled = true;
    var worker = new System.Threading.Thread(new System.Threading.ThreadStart(captureThread));
    worker.Start();
}
/// <summary>
/// Opens capture device <paramref name="num"/>, subscribes the frame
/// handler and starts grabbing immediately.
/// </summary>
public Camera(int num)
{
    lens = new Capture(num);
    lens.ImageGrabbed += this.Process; // invoked once per grabbed frame
    lens.Start();
}
/// <summary>
/// Toggles frame capture: lazily creates the capture device on first use,
/// then attaches/detaches the per-idle frame processor and updates the
/// button caption.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // If capture is not created, create it now.
    if (capture == null)
    {
        try
        {
            capture = new Capture();
        }
        catch (NullReferenceException excpt)
        {
            MessageBox.Show(excpt.Message);
        }
    }

    if (capture != null)
    {
        if (captureInProgress)
        {
            // Camera is getting frames: stop and relabel for resuming.
            button1.Text = "Start!";
            // FIX: this unsubscribe was commented out, so frames kept being
            // processed after "Stop" and a duplicate handler was added on
            // every restart.
            Application.Idle -= ProcessFrame;
        }
        else
        {
            // Camera is idle: start processing frames and relabel.
            button1.Text = "Stop";
            Application.Idle += ProcessFrame;
        }
        captureInProgress = !captureInProgress;
    }
}
/// <summary>
/// Opens the default camera, primes it with one frame and processes
/// subsequent frames whenever the UI thread is idle.
/// </summary>
public void StartStreaming()
{
    grabber = new Capture();
    grabber.QueryFrame(); // warm-up frame
    Application.Idle += new EventHandler(FrameGrabber);
    //All.Enabled = false;
}
/// <summary>
/// Training form: loads the face/eye cascades, opens the camera, resolves
/// the installation folder (asking the user on first run and persisting
/// the choice in setting.txt), and trains on idle frames.
/// </summary>
public FormTrain(Form1 frm1, Classifier_Train cls)
{
    InitializeComponent();
    _form1 = frm1;
    eigenRecog = cls;

    // Detection cascades, eye-size bounds and the overlay font.
    face = new HaarCascade("haarcascade_frontalface_default.xml");
    eyeWithGlass = new CascadeClassifier("haarcascade_eye_tree_eyeglasses.xml");
    mydb = new DBConn();
    minEye = new Size(10, 10);
    maxEye = new Size(225, 225);
    font = new MCvFont(FONT.CV_FONT_HERSHEY_TRIPLEX, 0.5d, 0.5d);
    captureT = new Capture();

    // FIX: the original tested File.ReadAllText(...) != null, which is
    // always true when the file exists and *throws* when it does not — the
    // first-run branch below was unreachable. Test for existence instead.
    if (File.Exists("setting.txt"))
    {
        folderPath = File.ReadAllText("setting.txt");
    }
    else
    {
        // First run: ask for the installation path and persist it.
        FolderBrowserDialog b = new FolderBrowserDialog();
        b.Description = "Please select your installation path";
        DialogResult r = b.ShowDialog();
        if (r == DialogResult.OK) // Test result.
        {
            folderPath = b.SelectedPath;
            Console.WriteLine(folderPath);
            File.WriteAllText(@"setting.txt", folderPath);
            MessageBox.Show("Path is at " + folderPath);
        }
    }
    initializeCombobox();
    Application.Idle += new EventHandler(TrainFrame);
}
/// <summary>
/// Starts face detection: opens camera index 1 and runs the recognition
/// frame grabber whenever the dispatcher is idle.
/// </summary>
private void BTn_Detect_Click(object sender, RoutedEventArgs e)
{
    grabberRecognition = new Emgu.CV.Capture(1); // camera #1 is opened on click
    grabberRecognition.QueryFrame(); // prime the device before streaming
    // Grab a frame each time the WPF message loop goes idle.
    ComponentDispatcher.ThreadIdle += new EventHandler(FrameGrabberRecognition);
}
/// <summary>
/// Experimental chroma-key loop: pulls frames from a video file, keys the
/// green range out of a still image and composites the video into the
/// keyed region, showing the result in an OpenCV window. Loops forever —
/// there is no exit condition.
/// </summary>
public void ThreadMain()
{
    // NOTE(review): 'writer' is never written to (the writer.Write call is
    // commented out) and never disposed — dead resource.
    VideoWriter writer = new VideoWriter("video.mp4", 60, new Size(1280, 720), true);
    int frame = 0;
    // NOTE(review): absolute developer-machine paths; parameterize before reuse.
    Capture cap = new Emgu.CV.Capture(@"C:\Users\Peter Husman\Downloads\Wildlife.wmv");
    // Still image whose green screen gets keyed out.
    Mat minions = new Capture(@"C:\Users\Peter Husman\Downloads\maxresdefault.jpg").QueryFrame();
    Mat data = new Mat();
    Mat chroma = new Mat();
    Mat threshold = new Mat();
    Mat bNot = new Mat();
    Mat minionsMask = new Mat();
    Mat vidMask = new Mat();
    var filter = new BackgroundSubtractorMOG();
    while (true)
    {
        try
        {
            cap.Grab();
            bool grabbed = cap.Retrieve(data);
            // Pixels inside this BGR range count as green-screen background.
            CvInvoke.InRange(minions, new ScalarArray(new Emgu.CV.Structure.MCvScalar(0, 206, 0)), new ScalarArray(new Emgu.CV.Structure.MCvScalar(129, 255, 164)), threshold);
            threshold.CopyTo(bNot);
            CvInvoke.BitwiseNot(bNot, bNot);
            // Foreground of the still + video filling the keyed region.
            Mask(minions, bNot, minionsMask);
            Mask(data, threshold, vidMask);
            CvInvoke.BitwiseOr(minionsMask, vidMask, chroma);
            CvInvoke.Imshow("Window", chroma);
            CvInvoke.WaitKey(1);
            frame++;
        }
        catch (Exception ex)
        {
            // NOTE(review): swallowing every exception hides decode failures
            // and end-of-stream — at minimum log 'ex'.
        }
    }
}
/// <summary>
/// Opens the default camera and pins exposure, brightness and contrast to
/// fixed values so frame intensity stays comparable between runs.
/// </summary>
public void StartCamera()
{
    capture = new Emgu.CV.Capture();
    // Disable auto-exposure first, then apply fixed, device-specific values.
    capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_AUTO_EXPOSURE, 0);
    capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_BRIGHTNESS, 33);
    capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_CONTRAST, 54);
    // Negative exposure values follow driver-specific conventions
    // (often 2^n seconds under DirectShow) — confirm for this camera.
    capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_EXPOSURE, -7);
}
/// <summary>
/// Starts the webcam preview: opens the default capture device, primes it
/// and processes frames on application idle. The button disables itself so
/// the idle handler cannot be attached twice.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    //Initialize the capture device
    webcam = new Emgu.CV.Capture();
    webcam.QueryFrame(); // warm-up frame
    //Initialize the FrameGraber event
    Application.Idle += new EventHandler(Framewebcam);
    button1.Enabled = false; // prevent double-subscription
}
/// <summary>
/// Stops the webcam: re-enables the start button, disables the stop/aux
/// buttons and releases the capture device.
/// </summary>
private void button2_Click(object sender, EventArgs e)
{
    button1.Enabled = true;
    button2.Enabled = false;
    button3.Enabled = false;

    if (captura != null)
    {
        // FIX: the capture was only nulled, leaking the camera handle until
        // finalization; release it explicitly before dropping the reference.
        captura.Dispose();
        captura = null;
    }
}
/// <summary>
/// Restarts the face-capture flow: clears the captured flag, reopens
/// camera 1 and resumes frame grabbing on dispatcher idle.
/// </summary>
private void BTn_CaptureAgain_Click(object sender, RoutedEventArgs e)
{
    imageCaptured = false;
    //faceCount = eyeCount = mouthCount = noseCount = 0;
    grabber = new Emgu.CV.Capture(1); // camera #1 is opened on click
    grabber.QueryFrame(); // prime the device before streaming
    // Grab a frame each time the WPF message loop goes idle.
    ComponentDispatcher.ThreadIdle += new EventHandler(FrameGrabber);
}
/// <summary>
/// Recognizer window. Reuses an already-open camera capture and SQL
/// connection owned by the caller (this form does not dispose them).
/// </summary>
/// <param name="capture">Shared camera capture.</param>
/// <param name="connection">Open SQL connection used for lookups.</param>
public RecognatorForm(Emgu.CV.Capture capture, SqlConnection connection)
{
    this.capture = capture;
    this.connection = connection;
    InitializeComponent();
    initialLocals();
    // Developer shortcuts kept for manual testing:
    //login_textBox.Text = "sedgusev";
    //passw_textBox.Text = "170396";
    //ip_textBox.Text = "192.168.0.2";
    //port_textBox.Text = "8080";
}
/// <summary>
/// Starts the webcam: flips the button states, creates the capture on
/// first use (subscribing the frame handler exactly once) and starts
/// grabbing.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    button1.Enabled = false;
    button2.Enabled = true;
    button3.Enabled = true;

    if (captura == null)
    {
        captura = new Emgu.CV.Capture(0);
        // FIX: subscribe only when the capture is created — the original
        // re-attached activarWebCam on every click, stacking duplicate
        // handlers whenever an existing capture was restarted.
        captura.ImageGrabbed += activarWebCam;
    }
    captura.Start();
}
/// <summary>
/// Validates the matriculation number typed by the user and, when it
/// belongs to an enrolled student who has not been added yet, reveals the
/// capture controls and starts grabbing frames from camera 1. All failure
/// paths surface a message in the warning label.
/// </summary>
private void BTn_Go_Click(object sender, RoutedEventArgs e)
{
    LBl_WarningMsg.Content = "";
    if (TBx_Matriculation.Text != "")
    {
        int i;
        // Matriculation numbers must be purely numeric.
        if (int.TryParse(TBx_Matriculation.Text, out i))
        {
            bool hasMat, studExists;
            // UniStudents.txt: '%'-separated list of enrolled matriculation numbers.
            string uniStudents = File.ReadAllText(AppDomain.CurrentDomain.BaseDirectory + "/TrainedFaces/UniStudents.txt");
            string[] arrayUniStudents = uniStudents.Split('%');
            hasMat = Array.IndexOf(arrayUniStudents, TBx_Matriculation.Text) >= 0;
            if (hasMat)
            {
                // TrainedNames.txt: students already added to the attendance system.
                string addedStudents = File.ReadAllText(AppDomain.CurrentDomain.BaseDirectory + "/TrainedFaces/TrainedNames.txt");
                string[] arrayaddedStudents = addedStudents.Split('%');
                studExists = Array.IndexOf(arrayaddedStudents, TBx_Matriculation.Text) >= 0;
                LBl_WarningMsg.Content = "";
                if (!studExists)
                {
                    try
                    {
                        // Reveal the capture UI and start streaming from camera 1.
                        TBx_Nose.Visibility = TBx_Eyes.Visibility = TBx_Face.Visibility = TBx_Mouth.Visibility = BTn_AddStudent.Visibility = BTn_CaptureAgain.Visibility = Visibility.Visible;
                        grabber = new Emgu.CV.Capture(1); // camera #1 opened on click
                        grabber.QueryFrame(); // prime the device
                        // Grab a frame whenever the dispatcher is idle.
                        ComponentDispatcher.ThreadIdle += new EventHandler(FrameGrabber);
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show(ex.Message);
                    }
                }
                else
                {
                    LBl_WarningMsg.Content = "Student already added in the Attendance System";
                }
            }
            else
            {
                LBl_WarningMsg.Content = "Student is not enrolled in the university";
            }
        }
        else
        {
            LBl_WarningMsg.Content = "Characters otherthan numbers not allowed!";
        }
    }
    else
    {
        LBl_WarningMsg.Content = "Matriculation number can not be empty!";
    }
}
/// <summary>
/// Init a capture to image from the camera with a url stream.
/// </summary>
/// <param name="cap">The capture (see review note in the body).</param>
/// <param name="e">Image Grabbed event handler.</param>
/// <param name="url">The url stream.</param>
private void InitCapture(Capture cap, EventHandler e, String url)
{
    // NOTE(review): 'cap' is passed by value, so the assignment below is
    // invisible to the caller — the caller's variable keeps whatever it
    // held before, and the Capture created here is only reachable through
    // the event subscription. This looks like a bug; consider
    // 'ref Capture cap' or returning the new Capture instead.
    // Init capture
    try
    {
        // Config the url stream
        cap = new Emgu.CV.Capture(url);
        cap.ImageGrabbed += e;
    }
    catch
    {
        MessageBox.Show("Fail: " + url);
    }
}
/// <summary>
/// Initializes the capture stored at <paramref name="index"/> in the
/// capture array from a URL stream and subscribes the shared frame
/// handler; any failure is reported to the user.
/// </summary>
/// <param name="index">The index of the item in the array.</param>
/// <param name="url">The URL of the stream to open.</param>
private void InitCapture(int index, String url)
{
    try
    {
        // Configure the url stream and hook the shared frame handler.
        var streamCapture = new Emgu.CV.Capture(url);
        listCapture[index] = streamCapture;
        listCapture[index].ImageGrabbed += ProcessFrame;
        counter = index;
    }
    catch
    {
        MessageBox.Show("Fail!");
    }
}
/// <summary>
/// Toggles the webcam, using the button caption as the state: "WEBCAM"
/// opens the camera and starts the frame timer; otherwise the timer stops
/// and the capture is released, freezing the last frame.
/// </summary>
private void btnWebcam_Click(object sender, EventArgs e)
{
    bool starting = btnWebcam.Text == "WEBCAM";
    if (starting)
    {
        // Open the default camera; the timer pulls and displays frames.
        Cursor = Cursors.WaitCursor;
        imgCapture = new Emgu.CV.Capture();
        timImage.Enabled = true;
        btnWebcam.Text = "CAPTURE";
        Cursor = Cursors.Arrow;
    }
    else
    {
        // Freeze the current frame and release the camera.
        timImage.Enabled = false;
        btnWebcam.Text = "WEBCAM";
        imgCapture.Dispose();
    }
}
/// <summary>
/// Advances to the next file in the list and returns its first frame.
/// Sets <c>b_listFinished</c> when the list is exhausted. The previous
/// capture is disposed and the GC forced before reopening, and a single
/// retry handles transient CvExceptions on open.
/// </summary>
private Mat GetNextFile()
{
    curImageIndx++;
    b_listFinished = true;
    if (curImageIndx < listFiles.Count && curImageIndx >= 0)
    {
        b_listFinished = false;
        currentFileName = listFiles.Values[curImageIndx];
        if (currentFileName.Length > 4)
        {
            if (cap != null)
            {
                // NOTE(review): forced full GC per file is very heavy-handed;
                // presumably works around native handles held by the decoder —
                // confirm it is still needed.
                cap.Dispose();
                cap = null;
                GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced);
                GC.WaitForFullGCComplete();
            }
            cap = new Emgu.CV.Capture(currentFileName);
            //System.Threading.Thread.Sleep(1000);
            //while(true)
            try
            {
                nextFrame = cap.QueryFrame();
                //break;
            }
            catch (CvException cf)
            {
                // Busy resource — e.g. the mouse hovering over the file in
                // Explorer keeps it locked; dispose and retry once.
                cap.Dispose();
                cap = null;
                GC.Collect();
                //Emgu.CV.Capture.CaptureModuleType.
                cap = new Emgu.CV.Capture(currentFileName);
                string str = cf.ErrorMessage;
            }
            //updateFrameNumber();
            textBox2.Text = currentFileName;
            if (!b_AutoRun)
            {
                // NOTE(review): displays prevFrame although nextFrame was just
                // queried — confirm whether this lag is intentional.
                pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
            }
        }
    }
    return(nextFrame);
}
// Opens the webcam if it is not already running.
private void 開啟攝影機()
{
    // If the webcam has not been started yet...
    if (cap == null)
    {
        try
        {
            // ...open the webcam at device index 1.
            // NOTE(review): index 1 is the *second* device on most systems,
            // though the original comment said "default webcam" — confirm.
            cap = new Capture(1);
            cap.FlipHorizontal = !cap.FlipHorizontal; // un-mirror the preview
        }
        catch
        {
            // Runtime string kept as-is ("please connect a camera").
            MessageBox.Show("請連結攝影機");
            return;
        }
    }
}
/// <summary>
/// Opens the default camera, reporting any failure to the user; on success
/// it hooks the frame handler, starts grabbing and relabels the start
/// button as "Stop".
/// </summary>
private void StartCapture()
{
    try
    {
        _cameraCapture = new Capture();
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message);
        return; // no camera — leave everything untouched
    }

    if (_cameraCapture == null)
        return;

    _cameraCapture.ImageGrabbed += _cameraCapture_ImageGrabbed;
    _cameraCapture.Start();
    startBtn.Text = "Stop";
}
/// <summary>
/// Lets the user pick a video file, opens it, resets the frame counter and
/// shows the first frame in the preview box.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    button2.Enabled = true;

    OpenFileDialog openPic = new OpenFileDialog();
    //openPic.Filter = ".avi";
    if (openPic.ShowDialog() != DialogResult.OK)
        return;

    currentFileName = openPic.FileName;
    this.frameNum = 0;
    if (currentFileName.Length > 4)
    {
        // Open the selected clip and display its first frame.
        cap = new Emgu.CV.Capture(currentFileName);
        prevFrame = cap.QueryFrame();
        updateFrameNumber();
        textBox2.Text = currentFileName;
        pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
    }
}
/// <summary>
/// Attendance form: prepares the people list view, opens the camera and,
/// per grabbed frame, detects faces, shows the cropped thumbnail and (when
/// the recognizer is trained) overlays the recognized name.
/// </summary>
public YokalmaSistemi()
{
    InitializeComponent();
    list_view_var_olanlar.Columns.Add("Adı Soyadı", 100);
    ComboBoxUpdate();

    Capture capture = new Capture();
    // FIX: the null test originally ran *after* capture.Start(); test first.
    if (capture == null)
    {
        MessageBox.Show("Kamera Açılamadı");
        return;
    }

    // Hoisted out of the frame handler: the cascade and font were being
    // re-allocated on every grabbed frame.
    HaarCascade haaryuz = new HaarCascade("haarcascade_frontalface_default.xml");
    MCvFont font = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 0.5, 0.5);

    capture.ImageGrabbed += (a, b) =>
    {
        var image = capture.RetrieveBgrFrame();
        var grayimage = image.Convert<Gray, byte>();
        MCvAvgComp[][] Yuzler = grayimage.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
        foreach (MCvAvgComp yuz in Yuzler[0])
        {
            // Normalized 100x100 crop used for display and recognition.
            var sadeyuz = grayimage.Copy(yuz.rect).Convert<Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
            pic_kucuk_res.Image = sadeyuz.ToBitmap();
            if (train.IsTrained)
            {
                name = train.Recognise(sadeyuz);
                int match_value = (int)train.Get_Eigen_Distance;
                image.Draw(name + " ", ref font, new Point(yuz.rect.X - 2, yuz.rect.Y - 2), new Bgr(Color.SteelBlue));
            }
            image.Draw(yuz.rect, new Bgr(Color.Purple), 2);
        }
        pic_kamera.Image = image.ToBitmap();
    };
    // Start only after the handler is attached so no frames are missed.
    capture.Start();
}
/// <summary>
/// OpenCamera menu handler: opens camera 0 and pumps frames on a dedicated
/// background thread.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void 打开相机ToolStripMenuItem_Click(object sender, EventArgs e)
{
    capture = new Emgu.CV.Capture(0); // first attached camera
    // NOTE(review): showCaptureImage runs off the UI thread — confirm it
    // marshals any control updates back via Invoke.
    task = new Thread(showCaptureImage);
    task.Start();
}
// "Start the algorithm" button.
private void start_Click(object sender, EventArgs e)
{
    // Hide the start button and reveal the controls used while running.
    start.Visible = false;
    metroButton1.Visible = true;
    label1.Visible = true;
    textName.Visible = true;
    metroComboBox1.Visible = true;
    metroCheckBox1.Visible = true;
    metroLabel1.Visible = true;
    metroLabel2.Visible = true;
    metroTrackBar1.Visible = true;
    metroLabel4.Visible = true;
    metroLabel3.Visible = true;
    metroLabel5.Visible = true;
    metroLabel6.Visible = true;
    metroLabel7.Visible = false;
    metroButton2.Visible = true;
    metroButton5.Visible = true;
    metroButton6.Visible = true;
    // Check whether a camera exists, connect to it and start the frame loop.
    try
    {
        camera = new Emgu.CV.Capture();
        camera.QueryFrame();
        Application.Idle += new EventHandler(FrameProcedure);
    }
    catch (Exception)
    {
        // Camera not found: ask the user whether to retry; on refusal,
        // wipe stored face data and shut the application down.
        var camfail = MessageBox.Show("Похоже, что камера не была обнаружена. Вы уверены, что камера подключена и стабильно работает?", "Камера не обнаружена", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
        if (camfail == DialogResult.Yes)
        {
            // Second attempt; a failure here is silently ignored.
            try
            {
                camera = new Emgu.CV.Capture();
                camera.QueryFrame();
                Application.Idle += new EventHandler(FrameProcedure);
            }
            catch { }
        }
        if (camfail == DialogResult.No)
        {
            MessageBox.Show("Попробуйте перезапустить компьютер. 
Если ошибка не исчезла, сообщите на почту: [email protected]", "Спасибо", MessageBoxButtons.OK, MessageBoxIcon.None);
            DirectoryInfo di = new DirectoryInfo(Application.StartupPath + "/Faces/");
            try
            {
                // Best-effort cleanup: delete every stored face image and
                // subdirectory, ignoring individual failures.
                foreach (FileInfo file in di.EnumerateFiles())
                {
                    try
                    {
                        file.Delete();
                    }
                    catch (Exception) { }
                }
                foreach (DirectoryInfo dir in di.EnumerateDirectories())
                {
                    try
                    {
                        dir.Delete(true);
                    }
                    catch (Exception) { }
                }
                // Reset all recognizer state before exiting.
                di.Delete();
                Count = 0;
                trainingImames.Clear();
                labels.Clear();
                name = null;
                metroComboBox1.Items.Clear();
                Environment.Exit(0);
                Application.Exit();
            }
            catch
            {
                // Cleanup failed part-way: reset state again and close anyway.
                Count = 0;
                trainingImames.Clear();
                labels.Clear();
                name = null;
                metroComboBox1.Items.Clear();
                Close();
                Environment.Exit(0);
                Application.Exit();
            }
            Close();
            Environment.Exit(0);
            Application.Exit();
        }
    }
}
/// <summary>
/// Re-creates the shared camera handle using the default camera index taken
/// from the <c>CaptureSettings</c> singleton, and records that index in
/// <c>ACTUAL_CAMERA_INDEX</c>.
/// </summary>
public static void Reinitialize()
{
    ACTUAL_CAMERA_INDEX = CaptureSettings.Instance().DEFAULT_CAMERA_INDEX;
    camera = new Emgu.CV.Capture(ACTUAL_CAMERA_INDEX);
}
/// <summary>
/// Remote-control poll: reads this machine's Control flag from the server,
/// acknowledges it by resetting the flag to '1', then performs the action
/// the flag encodes (fullscreen, logoff, minimize, restart helper, shutdown,
/// webcam grab, or image fetch).
/// </summary>
void timer_Tick(object sender, EventArgs e)
{
    // SECURITY(review): this connection string embeds plaintext credentials
    // in source code — move them to protected configuration / a secret store.
    // Kept here only to preserve current behavior.
    const string connStr = "workstation id=MyPDBPCC.mssql.somee.com;packet size=4096;user id=cma93_SQLLogin_1;pwd=jztlqk3kqs;data source=MyPDBPCC.mssql.somee.com;persist security info=False;initial catalog=MyPDBPCC";

    string id = getID(), text = GetMACAddress(); // 'text' kept for parity; currently unused

    // Acknowledge the command: reset Control to '1' for this machine.
    // Parameterized (the original concatenated the id into the SQL text)
    // and disposed via using (the original leaked every connection).
    Action acknowledge = delegate
    {
        using (SqlConnection ackCon = new SqlConnection(connStr))
        using (SqlCommand ackCmd = new SqlCommand("Update table1 Set Control='1' where ID=@id", ackCon))
        {
            ackCmd.Parameters.AddWithValue("@id", id);
            ackCon.Open();
            ackCmd.ExecuteNonQuery();
        }
    };

    // Fetch the pending control code; bail out quietly when no row exists
    // (the original called dr.Read() unchecked and would have thrown).
    string control;
    using (SqlConnection con = new SqlConnection(connStr))
    using (SqlCommand cmd = new SqlCommand("Select Control from table1 where Id=@id", con))
    {
        cmd.Parameters.AddWithValue("@id", id);
        con.Open();
        object result = cmd.ExecuteScalar();
        if (result == null)
        {
            return;
        }
        control = Convert.ToString(result);
    }

    switch (control)
    {
        case "2":
            acknowledge();
            fullScreen();
            break;
        case "5":
            acknowledge();
            logoff();
            break;
        case "4":
            // Original order preserved: minimize first, then acknowledge.
            this.WindowState = FormWindowState.Minimized;
            acknowledge();
            break;
        case "3":
            acknowledge();
            screenCapture(true);
            this.Close();
            Process.Start("PCBC.exe");
            break;
        case "6":
            acknowledge();
            Process.Start("shutdown", "/s /t 0");
            break;
        case "7":
            acknowledge();
            grabber = new Emgu.CV.Capture();
            grabber.QueryFrame();
            Web();
            this.Close();
            Process.Start("PCBC.exe");
            break;
        case "8":
            acknowledge();
            localFetchImages();
            Process.Start("PCBC.exe");
            break;
    }
}
/// <summary>
/// Opens camera 0 and requests the frame dimensions held in the
/// <c>size</c> field, then sets the auto-exposure property to 0.
/// </summary>
public ExCamera()
{
    capture = new Capture(0);
    // NOTE(review): the driver may clamp or ignore requested dimensions;
    // read the properties back if the exact size matters — TODO confirm.
    capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, size.Width);
    capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, size.Height);
    // 0 presumably disables auto-exposure (backend-dependent) — TODO confirm
    // against the Emgu/OpenCV CAP_PROP documentation.
    capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_AUTO_EXPOSURE, 0);
}