public void write() {
            int codec = Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1'); // 'PIM1' = MPEG-1

            int fps = 25;
            if (list_timestamps.Count > 0)
            {
                String tempvideopath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[0].ToString() + ".mpg";
                Capture tempcapture = new Capture(tempvideopath);
                fps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
                tempcapture.Dispose();
            }

            // videopath (field) holds the output file; the writer stays open while the
            // loop below appends every source clip in timestamp order.
            VideoWriter videowriter = new VideoWriter(videopath, codec, fps, 640, 480, true);

            for (int i = 0; i < list_timestamps.Count; i++)
            {
                String clippath = Program.getConfiguration().Mediafolderpath + @"\" + list_timestamps[i].ToString() + ".mpg";
                try
                {
                    Capture joincapture = new Capture(clippath);
                    Image<Bgr, byte> frame = joincapture.QueryFrame();
                    for (int n = 1; n < 15; n++)
                        joincapture.QueryFrame(); // drop the next 14 frames of the clip

                    while (frame != null)
                    {
                        videowriter.WriteFrame(frame);
                        frame = joincapture.QueryFrame();
                    }
                    joincapture.Dispose();

                    // Notify the main frame to update its progress bar
                    ExportVideoProgressEventArgs e = new ExportVideoProgressEventArgs(i);
                    DoneAppendingRallyVideoEvent(this, e);
                }
                catch (NullReferenceException) { Console.WriteLine("Unreadable video file: " + clippath); }
            }
            videowriter.Dispose();
        
        }
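The writer above hardcodes 640x480, so clips of any other size may be written incorrectly. A minimal sketch (against the same Emgu CV 2.x API as this example, not code from the source) probes the first clip for all three values before constructing the writer:

            // Probe the first clip so the writer matches the source material.
            int fps, width, height;
            using (Capture probe = new Capture(tempvideopath))
            {
                fps = (int)probe.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
                width = (int)probe.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH);
                height = (int)probe.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT);
            }
            VideoWriter videowriter = new VideoWriter(videopath, codec, fps, width, height, true);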
Example #2
        private void okBtn_Click(object sender, EventArgs e)
        {
            Application.Idle -= new EventHandler(FrameGrabber);
            grabber.Dispose();
            RibbonForm1 form2 = new RibbonForm1();

            this.Hide();
            form2.ShowDialog();
            this.Close();
        }
        public void processarVideo(ParametrosDinamicos parametros)
        {
            mCapture = new Capture(mNomeDoArquivo);

            inicializarVariaveis();
            carregarParametrosNaTela(parametros);

            while (mImagemColorida != null)
            {
                atualizarParametros(parametros);
                mContadorDeFrames++;
                processarImagem(false);
                CvInvoke.WaitKey(100);
               // CvInvoke.cvShowImage("Imagem", mImagemColorida);
                desenharNaImagem(parametros);
                exibirImagem(false);

                if (mSalvarImagem)
                {
                    /*CvInvoke.SaveImage(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames), mImagemColorida);
                    EnviarImagensEmail(new Attachment(String.Format(@"C:\Users\Tadeu Rahian\Dropbox\Dropbox\UFMG\PFC1\Imagens\mImagemColorida{0}.jpg", mContadorDeFrames)));
                    mSalvarImagem = false;*/
                }
                mImagemColorida = mCapture.QueryFrame();
            }

            mCapture.Dispose();
        }
Example #4
 protected override void DisposeManaged()
 {
     if (_device != null)
     {
         _device.Dispose();
     }
 }
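A common hardening of this pattern (a sketch, not code from the source) also clears the field so a second dispose call is a no-op:

 protected override void DisposeManaged()
 {
     _device?.Dispose(); // null-conditional call (C# 6+)
     _device = null;     // make repeated disposal harmless
 }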
Example #5
 /// <summary>
 /// Disposes current camera
 /// </summary>
 public static void DisposeCamera()
 {
     if (camera != null)
     {
         camera.Dispose();
     }
 }
Example #6
 private void 关闭相机ToolStripMenuItem_Click(object sender, EventArgs e)
 {
     if (this.camp != null)
     {
         camp.Close();
     }
     task.Abort();
     capture.ImageGrabbed -= Capture_ImageGrabbed;
     capture.Stop();
     capture.Dispose();
 }
        //fileName: video file to grab images from
        //ms: capture one frame every ms milliseconds; ms = 0 means capture every frame (the source is assumed to run at 24 frames per second)
        public List<string> CatchImages(string fileName, int ms, string outputPath)
        {
            _log.Debug("Start to capture");
            if (string.IsNullOrWhiteSpace(fileName) || string.IsNullOrWhiteSpace(outputPath))
            {
                _log.Error("Cannot catch images from path: " + fileName + " and output to: " + outputPath);
                return new List<string>();
            }
            //List<Image<Bgr, Byte>> imageList = new List<Image<Bgr, Byte>>();
            List<string> imagePath = new List<string>();
            Capture capture = new Capture(fileName);

            double frameCount = capture.GetCaptureProperty(CapProp.FrameCount);
            capture.Dispose();

            int index = 0;
            int fc = (int)frameCount;
            Mat mat = null;
            try
            {
                //TODO: Modified this to change period of capture image.
                while (index < 30/*fc*/)
                {
                    index++;
                    using (capture = new Capture(fileName))
                    {
                        capture.SetCaptureProperty(CapProp.PosFrames, (double)index);

                        using (mat = capture.QueryFrame())
                        {
                            string indexStr = index < 10 ? "0" + index : index.ToString();
                            string imgPath = outputPath + "\\" + indexStr;
                            if (!Directory.Exists(outputPath))
                            {
                                Directory.CreateDirectory(outputPath);
                            }
                            //long quality = 60;
                            //saveJpeg(imgPath, mat.Bitmap, quality);
                            string grayImgName = saveGrayAndThreshold(imgPath, mat.Bitmap);
                            if (!string.IsNullOrEmpty(grayImgName))
                            {
                                imagePath.Add(grayImgName);
                            }
                        }
                    }
                }
            }
            catch (System.Exception ex)
            {
                _log.Error("Exception:", ex);
            }

            return imagePath;
        }
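Note that the loop above reopens the video file once per frame. A leaner sketch (assuming the same Capture/CapProp API used above; this is not code from the source) keeps a single Capture open and seeks inside it:

            using (Capture cap = new Capture(fileName))
            {
                for (int i = 1; i <= 30; i++)
                {
                    cap.SetCaptureProperty(CapProp.PosFrames, i);
                    using (Mat frame = cap.QueryFrame())
                    {
                        if (frame == null)
                            break; // past the end of the stream
                        // hand frame.Bitmap to saveGrayAndThreshold() here
                    }
                }
            }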
 private void btnWebcam_Click(object sender, EventArgs e)
 {
     if (btnWebcam.Text == "WEBCAM")
     {
         Cursor           = Cursors.WaitCursor;
         imgCapture       = new Emgu.CV.Capture();
         timImage.Enabled = true;
         btnWebcam.Text   = "CAPTURE";
         Cursor           = Cursors.Arrow;
     }
     else
     {
         timImage.Enabled = false;
         btnWebcam.Text   = "WEBCAM";
         imgCapture.Dispose();
     }
 }
Example #9
        private Mat GetNextFile()
        {
            curImageIndx++;
            b_listFinished = true;
            if (curImageIndx < listFiles.Count && curImageIndx >= 0)
            {
                b_listFinished  = false;
                currentFileName = listFiles.Values[curImageIndx];
                if (currentFileName.Length > 4)
                {
                    if (cap != null)
                    {
                        cap.Dispose();
                        cap = null;
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced);
                        GC.WaitForFullGCComplete();
                    }

                    cap = new Emgu.CV.Capture(currentFileName);
                    //System.Threading.Thread.Sleep(1000);
                    //while(true)
                    try
                    {
                        nextFrame = cap.QueryFrame();
                        //break;
                    }
                    catch (CvException cf)
                    {
                        // The file resource is busy (e.g. the mouse pointer is hovering over it)
                        cap.Dispose();
                        cap = null;
                        GC.Collect();
                        //Emgu.CV.Capture.CaptureModuleType.
                        cap = new Emgu.CV.Capture(currentFileName);
                        string str = cf.ErrorMessage;
                    }
                    //updateFrameNumber();
                    textBox2.Text = currentFileName;
                    if (!b_AutoRun)
                    {
                        pictureBox1.Image = prevFrame.Bitmap.Clone() as Bitmap;
                    }
                }
            }
            return nextFrame;
        }
        public async void StartCapture()
        {
            // Can only access the first camera without CL Eye SDK
            if (_camera.TrackerId == 0 && !_camera.Design)
            {
                _capture = new Capture(_camera.TrackerId);
                _ctsCameraCalibration = new CancellationTokenSource();
                CancellationToken token = _ctsCameraCalibration.Token;

                _capture.Start();
                try
                {
                    // needed to avoid bitmapsource access violation?
                    _captureTask = Task.Run(() =>
                    {
                        while (!token.IsCancellationRequested)
                        {
                            ImageGrabbed();
                        }
                    }, token);
                    await _captureTask;
                }
                catch (OperationCanceledException)
                {

                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.StackTrace);
                }
                finally
                {
                    _capture.Stop();
                    _capture.Dispose();
                }
            }
        }
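The loop above ends only when the token is cancelled. A matching stop method (the name StopCapture is hypothetical, but the field is the one declared above) would be:

        public void StopCapture()
        {
            // Cancelling the token ends the while loop, the awaited task
            // completes, and the finally block stops and disposes _capture.
            _ctsCameraCalibration?.Cancel();
        }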
Example #11
        private void LoadHandTestingPatternsFromDir(string path)
        {
            try
            {
                byte[] TestPatterns;
                MNistHeight = 32;
                MNistWidth = 32;
                MNistSize = MNistWidth * MNistHeight;
                int TrainingLabelCount = 9;
                int LabelImageCount = 100;
                TestingPatternsCount = TrainingLabelCount * LabelImageCount;
                TestPatterns = new byte[TestingPatternsCount * MNistSize];
                //Capture cap = new Capture(@"D:\ebooks\hand gestrue recognition\hand data set\mov\0.MOV");
                unsafe
                {

                    for (int ii = 0; ii < TrainingLabelCount; ii++)
                    {
                        string type = ii.ToString("D1");
                        //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image  
                        //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                        Capture cap = new Capture(path + "\\" + type + ".MOV");
                        for (int i = 0; i < 200; i++)
                        {
                            cap.QueryGrayFrame(); // skip the first 200 frames
                        }
                        for (int i = 0; i < LabelImageCount; i++)
                        {
                            Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                            for (int j = 0; j < MNistSize; j++)
                            {
                                TestPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                            }
                        }
                        cap.Dispose();
                    }
                }


                MNISTTesting = new ByteImageData[TestingPatternsCount];
                Parallel.For(0, TestingPatternsCount, parallelOption, j =>
                {
                    ByteImageData pattern = new ByteImageData(j / LabelImageCount, new byte[MNistSize]);
                    for (int i = 0; i < MNistSize; i++)
                    {
                        pattern.Image[i] = TestPatterns[(j * MNistSize) + i];
                    }
                    MNISTTesting[j] = pattern;
                });

            }
            catch (Exception)
            {
                throw;
            }
        }
        /// <summary>
        /// Loads a frame by its number (from a video)
        /// </summary>
        /// <param name="videoFileName">Video file name</param>
        /// <param name="keyFrameIOInformation">Frame information</param>
        /// <returns>Frame</returns>
        public Task<GreyVideoFrame> LoadFrameAsync(string videoFileName, KeyFrameIOInformation keyFrameIOInformation)
        {
            try
            {
                if (videoFileName == null || videoFileName.Length == 0)
                    throw new ArgumentNullException("Null videoFileName in LoadFrameAsync");
                if (keyFrameIOInformation == null)
                    throw new ArgumentNullException("Null keyFrameIOInformation in LoadFrameAsync");
                if (keyFrameIOInformation.Number < 0)
                    throw new ArgumentException("Error frameNumber in LoadFrameAsync");
                if (keyFrameIOInformation.Width <= 0)
                    throw new ArgumentException("Error Width in LoadFrameAsync");
                if (keyFrameIOInformation.Height <= 0)
                    throw new ArgumentException("Error Height in LoadFrameAsync");

                return Task.Run(() =>
                {
                  /*  string videoPath = System.IO.Path.GetDirectoryName(videoFileName);
                    string framesDirName = System.IO.Path.Combine(videoPath, "VideoFrames");
                    if (!Directory.Exists(framesDirName))
                        Directory.CreateDirectory(framesDirName);*/

                    GreyVideoFrame videoFrame = null;

                    int currentFrameNumber = -1;
                    Capture capture = new Capture(videoFileName);
                    Image<Gray, byte> frame = null;
                    while (currentFrameNumber != keyFrameIOInformation.Number)
                    {
                        frame = capture.QueryGrayFrame();
                        currentFrameNumber++;
                    }
                    if (frame != null)
                    {
                       // string frameFileName = Path.Combine(framesDirName, keyFrameIOInformation.Number.ToString() + ".jpg");
                        frame = frame.Resize(keyFrameIOInformation.Width, keyFrameIOInformation.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
                      //  frame.Save(frameFileName);
                        videoFrame = CreateVideoFrame(frame, keyFrameIOInformation);
                    }
                    capture.Dispose();
                    return videoFrame;
                });
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
        }
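Decoding every frame up to the requested number makes each lookup O(n). If the container supports frame-accurate seeking, a sketch of the shortcut (same CAP_PROP enum as elsewhere in these examples; not code from the source) is:

                    // Jump directly to the key frame instead of replaying the stream.
                    Capture capture = new Capture(videoFileName);
                    capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_POS_FRAMES, keyFrameIOInformation.Number);
                    Image<Gray, byte> frame = capture.QueryGrayFrame();
                    capture.Dispose();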
Example #13
        private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
        {
            //StreamWriter sw = new StreamWriter(@opendutpath.FileName, true);
            globalpar.testnum = 0;
            byte[] DUTResp = new byte[0];
            int inihsa = globalpar.hsa;
            int inihbp = globalpar.hbp;
            int inihfp = globalpar.hfp;

            do
            {
                if (backgroundWorker1.CancellationPending == true)
                {

                    e.Cancel = true;
                    break;

                }
                else
                {
                    globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));

                    System.Threading.Thread.Sleep(1000);

                    if (globalpar.pixelformat == 24)
                    {
                        PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_888, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                    }
                    else if (globalpar.pixelformat == 18)
                    {
                        PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_666, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                    }
                    else
                    {
                        PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_565, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                    }

                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                    System.Threading.Thread.Sleep(globalpar.waittime);
                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit" + globalpar.videotype + (globalpar.bitrate) * 2 + " Mbps" + " " + globalpar.hsa + " " + globalpar.hbp + " " + globalpar.hfp + measfluke(), 0, ref errMsg, ref statusMsg));
                    backgroundWorker1.ReportProgress(0);

                    if (checkBox_webcam.Checked == true)
                    {
                        cap = new Capture(0);
                        Image<Bgr, Byte> camimage = cap.QueryFrame();
                        //because we are using an autosize picturebox we need to do a thread safe update
                        DisplayImage(camimage.ToBitmap());
                        cap.Dispose();
                        string savepath = Path.GetDirectoryName(opendutpath.FileName);
                        pictureBox1.Image.Save(@savepath +"\\" + globalpar.pixelformat + "bit" + globalpar.videotype + (Convert.ToInt32(textbox_hsfreq.Text) * 2).ToString() + "Mbps.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    }


                    PE(client.PGRemoteCmd(RPCCmds.PG_ABORT, ref errMsg, ref statusMsg));
                    switch (globalpar.testnum)
                    {
                        case 0:
                            globalpar.hsa = globalpar.hsa + 20;
                            globalpar.testnum = 1;
                            break;
                        case 1:
                            globalpar.hbp = globalpar.hbp + 20;
                            globalpar.testnum = 2;
                            break;
                        case 2:
                            globalpar.hfp = globalpar.hfp + 20;
                            globalpar.testnum = 0;
                            break;

                    }
                    /*
                    globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));
                    //sw.WriteLine(globalpar.hsa + " " + globalpar.hbp + " " + globalpar.hfp + " " + globalpar.bitrate);
                     */
                }

            } while (globalpar.bitrate < globalpar.targetbitrate);

            //sw.Close();
        }
Example #14
        private void backgroundskewswing_DoWork(object sender, DoWorkEventArgs e)
        {
            //StreamWriter sw = new StreamWriter(@opendutpath.FileName, true);
            globalpar.testnum = 0;
            byte[] DUTResp = new byte[0];
            int inihsa = globalpar.hsa;
            int inihbp = globalpar.hbp;
            int inihfp = globalpar.hfp;
            float[] cmvolt = new float[3] {
                 0.33F, 0.2F ,0.07F
            };
            float[] difvolt = new float[5] {
               0.1F, 0.28F, 0.22F, 0.16F , 0.14F
            };

            do
            {
                if (backgroundskewswing.CancellationPending == true)
                {

                    e.Cancel = true;
                    break;

                }
                else
                {
                    globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));
                    float ui = 1F / ((globalpar.bitrate)*2) / 1000000F;
                    System.Threading.Thread.Sleep(1000);
                    if (globalpar.pixelformat == 24)
                    {
                        for (int i = 0; i < 3; i++) // CM volt
                        {
                            for (int j = 0; j < 5; j++) //diff volt
                            {
                                PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.ENABLE_AUTO_SET_CLOCK_DELAY, 0, ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.SET_HS_LOW_VOLT, 1, (cmvolt[i] - difvolt[j] * 2), ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.SET_HS_HIGH_VOLT, 1, (cmvolt[i] + difvolt[j] * 2), ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));

                                for (float k = 1; k <= 19; k++)
                                {
                                    float skew = ui / 20 * k;
                                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_DELAY, 4, skew, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));

                                    PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_888, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                                    System.Threading.Thread.Sleep(globalpar.waittime);

                                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit , " + globalpar.videotype +" , " +(globalpar.bitrate) * 2 + " Mbps " + ", CM=" + cmvolt[i] + " mV , Diff = " + difvolt[j] + ", UI = " + (float)1/20*k +" , Delay time " + skew*1E12F +measfluke(), 0, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.PG_ABORT, ref errMsg, ref statusMsg));
                                }
                            }
                        }

                    }
                    else if (globalpar.pixelformat == 18)
                    {
                        for (int i = 0; i < 3; i++) // CM volt
                        {
                            for (int j = 0; j < 5; j++) //diff volt
                            {
                                PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.ENABLE_AUTO_SET_CLOCK_DELAY, 0, ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.SET_HS_LOW_VOLT, 1, (cmvolt[i] - difvolt[j] * 2), ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.SET_HS_HIGH_VOLT, 1, (cmvolt[i] + difvolt[j] * 2), ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));

                                for (float k = 1; k <= 19; k++)
                                {
                                    float skew = ui / 20 * k;
                                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_DELAY, 4, skew, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));

                                    PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_666, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                                    System.Threading.Thread.Sleep(globalpar.waittime);

                                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit , " + globalpar.videotype + " , " + (globalpar.bitrate) * 2 + " Mbps , " + ", CM=" + cmvolt[i] + " mV , Diff = " + difvolt[j] + ", UI = " + (float)1 / 20 * k + measfluke(), 0, ref errMsg, ref statusMsg));
                                }
                            }
                        }

                    }
                    else
                    {
                        for (int i = 0; i < 3; i++) // CM volt
                        {
                            for (int j = 0; j < 5; j++) //diff volt
                            {
                                PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.ENABLE_AUTO_SET_CLOCK_DELAY, 0, ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.SET_HS_LOW_VOLT, 1, (cmvolt[i] - difvolt[j] * 2), ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.SET_HS_HIGH_VOLT, 1, (cmvolt[i] + difvolt[j] * 2), ref errMsg, ref statusMsg));
                                PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));

                                for (float k = 1; k <= 19; k++)
                                {
                                    float skew = ui / 20 * k;
                                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_DELAY, 4, skew, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));

                                    PE(client.MIPICmd(RPCDefs.PACKED_PIXEL_STREAM_565, 0, false, RPCDefs.DT_HS, 0, 1, 0, 0, textbox_videopicpath.Text, null, ref errMsg, ref statusMsg));
                                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                                    System.Threading.Thread.Sleep(globalpar.waittime);

                                    PE(client.MIPICmd(RPCDefs.BTA, 0, false, RPCDefs.DT_LP, 0, 0, 0, 0, "", null, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteQuery(RPCCmds.GET_DUT_RESPONSE, 0, ref DUTResp, ref errMsg, ref statusMsg));
                                    PE(client.PGRemoteCmd(RPCCmds.SAVE_DUT_RESPONSE, textboxrpcsave.Text, 100, globalpar.pixelformat + "bit , " + globalpar.videotype + " , " + (globalpar.bitrate) * 2 + " Mbps , " + ", CM=" + cmvolt[i] + " mV , Diff = " + difvolt[j] + ", UI = " + (float)1 / 20 * k + measfluke(), 0, ref errMsg, ref statusMsg));
                                }
                            }
                        }

                    }

                     backgroundskewswing.ReportProgress(0);

                    if (checkBox_webcam.Checked == true)
                    {
                        cap = new Capture(0);
                        Image<Bgr, Byte> camimage = cap.QueryFrame();
                        //because we are using an autosize picturebox we need to do a thread safe update
                        DisplayImage(camimage.ToBitmap());
                        cap.Dispose();
                        string savepath = Path.GetDirectoryName(opendutpath.FileName);
                        pictureBox1.Image.Save(@savepath +"\\" + globalpar.pixelformat + "bit" + globalpar.videotype + (Convert.ToInt32(textbox_hsfreq.Text) * 2).ToString() + "Mbps.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                    }


                    PE(client.PGRemoteCmd(RPCCmds.PG_ABORT, ref errMsg, ref statusMsg));
                    switch (globalpar.testnum)
                    {
                        case 0:
                            globalpar.hsa = globalpar.hsa + 30;
                            globalpar.testnum = 1;
                            break;
                        case 1:
                            globalpar.hbp = globalpar.hbp + 30;
                            globalpar.testnum = 2;
                            break;
                        case 2:
                            globalpar.hfp = globalpar.hfp + 30;
                            globalpar.testnum = 0;
                            break;

                    }
                    /*
                    globalpar.bitrate = ((globalpar.hsa + globalpar.hbp + globalpar.hfp + globalpar.hact) * (globalpar.vsa + globalpar.vbp + globalpar.vfp + globalpar.vact) * globalpar.pixelformat / globalpar.lanecnt * globalpar.fr / 1000000 / 2 + 1);
                    PE(client.PGRemoteCmd(RPCCmds.START_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_HS_FREQ, ((float)globalpar.bitrate + 1) * 1000000, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_LP_FREQ, (float)18e+6, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.END_EDIT_CONFIG, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HFPORCH, globalpar.hfp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HBPORCH, globalpar.hbp, ref errMsg, ref statusMsg));
                    PE(client.PGRemoteCmd(RPCCmds.SET_TIMING_HSYNC, globalpar.hsa, ref errMsg, ref statusMsg));
                    //sw.WriteLine(globalpar.hsa + " " + globalpar.hbp + " " + globalpar.hfp + " " + globalpar.bitrate);
                     */
                }

            } while (globalpar.bitrate < globalpar.targetbitrate);

            //sw.Close();
        }
 public List<Image<Gray, byte>> AddImagesToHuman(string name)
 {
     var images = new List<Image<Gray, byte>>();
     var count = 0;
     var capture = new Capture();
     while (count < FaceCount)
     {
         var image = capture.QueryFrame().ToImage<Gray, byte>();
         var detectedFace = DetectFace(image);
         if (detectedFace != null)
         {
             images.Add(detectedFace);
             count++;
             OnCount(count, FaceCount);
             Thread.Sleep(500);
         }
     }
     ServicesWorker.GetInstance<HumanService>().AddHuman(name, images);
     capture.Dispose();
     return images;
 }
Example #16
      public void TestFileCapturePause()
      {
         
         int totalFrames1 = 0;

         Capture capture1 = new Capture(EmguAssert.GetFile("tree.avi"));
        
         //capture 1 will continue capturing all the frames.
         EventHandler captureHandle1 = delegate
         {
            Mat img = new Mat();
            capture1.Retrieve(img);
            totalFrames1++;
            Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
         };
         capture1.ImageGrabbed += captureHandle1;
         capture1.Start();

         System.Threading.Thread.Sleep(2);
         int totalFrames2 = 0;
         Capture capture2 = new Capture(EmguAssert.GetFile("tree.avi"));
         int counter = 0;
         //capture 2 will capture 2 frames, pause for 1 second, then continue;
         EventHandler captureHandle = delegate
         {
            counter++;
            totalFrames2++;

            bool needPause = (counter >= 2);
            if (needPause)
            {
               capture2.Pause();
               counter = 0;
            }

            Mat img = new Mat();
             capture2.Retrieve(img);
            Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));

            if (needPause)
            {
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
                  {
                     Trace.WriteLine("Sleep for 1 sec");
                     System.Threading.Thread.Sleep(1000);
                     capture2.Start();
                  });
            }

         };

         capture2.ImageGrabbed += captureHandle;
         capture2.Start();


         //int totalFrames = 69;
         Stopwatch s = Stopwatch.StartNew();
         while (totalFrames1 != totalFrames2)
         {
            System.Threading.Thread.Sleep(1000);

            if (s.ElapsedMilliseconds > 120 * 1000)
            {
               EmguAssert.IsTrue(false, "Unable to finish reading frames in 2 mins");
               break;
            }
         }
         capture1.Dispose();
         capture2.Dispose();
      }
Example #17
 private void camListComboBox_SelectedIndexChanged(object sender, EventArgs e)
 {
     KeyValuePair<int, string> SelectedItem = (KeyValuePair<int, string>)camListComboBox.SelectedItem;
     if (_camIndex != SelectedItem.Key)
     {
         _camIndex = SelectedItem.Key;
         if (_capture != null)
         {
             _capture.Dispose();
         }
         _capture = new Capture(_camIndex);
         GetCaptureInfo();
         _capture.Dispose();
     }
 }
        private void writeRallyVideoFromLoaded(double s, double e, VideoWriter writer, String loadedvideopath)
        {
            double start = Math.Floor(s);
            double end = Math.Ceiling(e);
            double startmsec = start * 1000;
            double endmsec = end * 1000;


            Capture tempcapture = new Capture(loadedvideopath);

            Image<Bgr, Byte> frame;
            if (tempcapture != null)
            {
                //tempcapture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_POS_MSEC, start);

                double fps2 = tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
                //tempcapture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_POS_MSEC, 100);

                for (int i = 0; i < (start * fps2); i++)
                    tempcapture.QueryFrame();

                int durationframes = (int)((end - start) * fps2); // clip duration converted to a frame count

                int count = 0;
                while (count < durationframes)
                {
                    frame = tempcapture.QueryFrame();
                    writer.WriteFrame(frame);
                    count++;
                }
            }


            tempcapture.Dispose();
            writer.Dispose();
        }
        private void button_startmove_Click(object sender, EventArgs e)
        {
            long start_time;
     

            // initiating a new move along with a new timestamp as identifier
            if (!new_move)
            {
                live_video_click_count = 0;

                // Enable the Spielzug/Move property buttons
                button_kill.Enabled = true;
                button_smash.Enabled = true;
                button_drop.Enabled = true;
                button_bigPoint.Enabled = true;
                button_timeout.Enabled = true;

                radioButton_playerupright.Enabled = true;
                radioButton_playerupleft.Enabled = true;
                radioButton_playerdownleft.Enabled = true;
                radioButton_playerdownright.Enabled = true;

                radioButton_playerupright.Checked = false;
                radioButton_playerupleft.Checked = false;
                radioButton_playerdownleft.Checked = false;
                radioButton_playerdownright.Checked = false;
                

                start_time = getCurrentTime(); // get current time as identifier
                while (List_timestamps.Contains(start_time))
                    start_time = getCurrentTime();
                
                List_timestamps.Add(start_time); // add timestamp to the list we use for the screenshots

                // Create a new Rally 
                Game.Current_rally = 
                    new Rally(configuration.Teama.Player1.Current_position,
                              configuration.Teama.Player2.Current_position,
                              configuration.Teamb.Player1.Current_position,
                              configuration.Teamb.Player2.Current_position,
                              start_time, Game.Sets.Count);

                
   

                // Clear the BirdView
                pictureBox_birdview.Invalidate();

                rallyframes = new List<Image<Bgr, byte>>();

                
                String move_identifier = start_time.ToString();
                String videopath = Program.getConfiguration().Mediafolderpath + @"\" + move_identifier + ".mpg";

                if (capture_device_index != -1)
                    this.videoWriter = new VideoWriter(videopath, Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1'), fps, 640, 480, true);


                // start a new video capture from video
                if (capture_device_index == -1)
                {
                    Capture tempcapture = new Capture(loaded_videopath);
                    int tempfps = (int)tempcapture.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FPS);
                    //this.videoWriter = new VideoWriter(videopath, Emgu.CV.CvInvoke.CV_FOURCC('P', 'I', 'M', '1'), tempfps, 640, 480, true);
                    startmilisecond = axWindowsMediaPlayer_live.Ctlcontrols.currentPosition;
                    axWindowsMediaPlayer_live.Ctlcontrols.play();
                    tempcapture.Dispose();
                }


                button_startmove.Text = "End of rally";
                button_startmove.ForeColor = System.Drawing.Color.Red;
                new_move = true;
            }
            else
            {
                live_video_click_count = 0;

                // Disable the Spielzug/Move property buttons
                button_kill.Enabled = false;
                button_smash.Enabled = false;
                button_drop.Enabled = false;
                button_bigPoint.Enabled = false;
                button_timeout.Enabled = false;

                radioButton_playerupright.Enabled = false;
                radioButton_playerupleft.Enabled = false;
                radioButton_playerdownleft.Enabled = false;
                radioButton_playerdownright.Enabled = false;

                radioButton_playerupright.Checked = false;
                radioButton_playerupleft.Checked = false;
                radioButton_playerdownleft.Checked = false;
                radioButton_playerdownright.Checked = false;

                // AUTO handling of score
                // Save into the list and add to xml output
                if (Game.Current_rally != null)
                {
                    Set current_set = Game.Sets[Game.Sets.Count - 1];
                    current_set.Rallies.Add(Game.Current_rally);
                    

                    // Set End Time
                    Game.Current_rally.EndRally_time = getCurrentTime();
                    Game.Current_rally.Duration_ticks = Game.Current_rally.EndRally_time - Game.Current_rally.Start_time;

                    // calculate the point for the successful team
                    Game.Current_rally.setNewScore(Game, configuration.TeamAup);
                    

                    xmlDoc.addRally(Game.Current_rally);


                    if (Game.Current_rally.Kill)
                        button_kill.Text = "KILL";
                    else
                        button_kill.Text = "NO KILL";

                    if (configuration.TeamAup)
                    {
                        textBox_scoreteamup.Text = current_set.TeamAScore.ToString();
                        textBox_scoreteamdown.Text = current_set.TeamBScore.ToString();
                    }
                    else
                    {
                        textBox_scoreteamup.Text = current_set.TeamBScore.ToString();
                        textBox_scoreteamdown.Text = current_set.TeamAScore.ToString();
                    }
                    // set color 
                    setScoreColor(current_set);
                    Team winner = current_set.getSetWinner(Game);

                    if (winner != null)
                    {
                        teamXhasWon();
                    }
                }


                

                // stop the capturing and write video
                if (capture_device_index != -1) // camera capture
                {
                    start_time = Game.Current_rally.Start_time;
                    WriteRallyVideoThread writevideoobject = new WriteRallyVideoThread(buffer, videoWriter, start_time);
                    writevideoobject.donewritingrallyvideo += new DoneWritingRallyVideoEventHandler(writevideothread_donewritingvideo);
                    writeRallyVideoFromBuffer(writevideoobject);
                }
                else // loaded video
                {
                    endmilisecond = axWindowsMediaPlayer_live.Ctlcontrols.currentPosition;
                    start_time = Game.Current_rally.Start_time;
                    WriteRallyVideoThread writevideoobject = new WriteRallyVideoThread(startmilisecond, endmilisecond, loaded_videopath, null, start_time);
                    writevideoobject.donewritingrallyvideo += new DoneWritingRallyVideoEventHandler(writevideothread_donewritingvideo);
                    writevideoobject.donewritingrallyframe += new DoneWritingRallyFrameEventHandler(writevideothread_donewritingframe);
                    writeRallyVideoFromLoaded(writevideoobject);
                    
                }


                button_startmove.Text = "Start of rally…"; // SAVE
                button_startmove.ForeColor = System.Drawing.Color.Black;
                new_move = false;

            }
        }
Example #20
        private void LoadHandTrainingPatternsFromDir(string path)
        {
            try
            {
                byte[] TrainPatterns;
                MNistHeight = 32;
                MNistWidth = 32;
                MNistSize = MNistWidth * MNistHeight;
                int TrainingLabelCount = 10;
                int LabelImageCount = 20;
                TrainingPatternsCount = TrainingLabelCount*LabelImageCount;

                TrainPatterns = new byte[TrainingPatternsCount * MNistSize];
                unsafe
                {

                    for (int ii = 0; ii < TrainingLabelCount; ii++)
                    {
                        string type = ii.ToString("D1");
                        //Image<Bgr, Byte> image = new Image<Bgr, byte>(path + "\\" + type + ".jpg").Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA); //Read the files as an 8-bit Bgr image  
                        //Image<Gray, Byte> gray = image.Convert<Gray, Byte>(); //Convert it to Grayscale
                        Capture cap = new Capture(path + "\\" + type + ".MOV");
                        for (int i = 0; i < LabelImageCount; i++)
                        {
                            Image<Gray, Byte> gray = cap.QueryGrayFrame().Resize(32, 32, Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                            for (int j = 0; j < MNistSize; j++)
                            {
                                TrainPatterns[ii * MNistSize * LabelImageCount + i * MNistSize + j] = ((byte*)gray.MIplImage.imageData + j)[0];
                            }
                        }
                        cap.Dispose();
                    }
                }
                MNISTTraining = new ByteImageData[TrainingPatternsCount];
                Parallel.For(0, TrainingPatternsCount, parallelOption, j =>
                {
                    int label = j / LabelImageCount;
                    ByteImageData imageData = new ByteImageData(label, new byte[MNistSize]);
                    for (int i = 0; i < MNistSize; i++)
                    {
                        imageData.Image[i] = TrainPatterns[(j * MNistSize) + i];
                    }
                    MNISTTraining[j] = imageData;
                });

            }
            catch (Exception)
            {
                throw;
            }
        }
Example #21
        private void fFTWebcamToolStripMenuItem_Click(object sender, EventArgs e)
        {
            Image<Gray, float> fft_Amp = null;
            Image<Gray, float> fft_Phase = null;

            Capture capture = new Capture();
            img = capture.QueryFrame().Clone();
            capture.Dispose();

            FFT.GetFFTAmpAndPhase(img, out fft_Amp, out fft_Phase);

            fft_Amp = FFT.PrepareForVizualization(fft_Amp, true);
            fft_Phase = FFT.PrepareForVizualization(fft_Phase, false);

            ShowIMG.ShowIMGStatic(fft_Amp, fft_Phase);
        }
        public void CaptureFrame()
        {
            lbl3 = "0";
            lbl4 = "";
            NamePersons.Add("");

            grabber = new Capture();
            //Get the current frame from the capture device
            try
            {
                currentFrame = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            catch { }
            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
          face,
          1.2,
          10,
          Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
          new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                //draw a red rectangle around the detected face
                currentFrame.Draw(f.rect, new Bgr(Color.Red), 2);


                if (trainingImages.ToArray().Length != 0)
                {
                    // UpdateRecognizer();
                    name = recognizer.Recognize(new Image<Gray, byte>(ImageProcessing.ImagePreProcessing(result.ToBitmap())));


                    //Draw the label for each face detected and recognized
                    currentFrame.Draw(name, ref font, new Point(f.rect.X - 2, f.rect.Y - 2), new Bgr(Color.LightGreen));

                }

                NamePersons[t - 1] = name;
                NamePersons.Add("");


                //Set the number of faces detected on the scene
                lbl3 = facesDetected[0].Length.ToString();

            }
            t = 0;

            //Concatenate the names of the recognized persons
            for (int nnn = 0; nnn < facesDetected[0].Length; nnn++)
            {
                names = names + NamePersons[nnn] + ", ";
            }
            //Show the faces processed and recognized
            pictureBoxFrameGrabber.Image = currentFrame.ToBitmap();
            lbl3 = names;
            names = "";
            //Clear the list(vector) of names
            NamePersons.Clear();
            grabber.Dispose();
            grabber = null;

        }
        public List<Face> GetFaces(int numFaces, int minScore)
        {
            int frameCount = 0;
            capture = new Capture();
            _motionHistory = new MotionHistory(1.0, 0.05, 0.5);
            List<Face> foundfaces = new List<Face>();

            while (foundfaces.Count() < numFaces)
            {
                Mat mat = capture.QueryFrame();
                Image<Bgr, Byte> ImageFrame = mat.ToImage<Bgr, Byte>();

                frameCount = frameCount + 1;
                MotionInfo motion = this.GetMotionInfo(mat);
                List<Face> detectedFaces = FaceDetector.FindFaces(ImageFrame, this.FaceTrainingFile, this.EyeTrainingFile, this.Scale, this.Neighbors, this.FaceMinSize);

                if (frameCount > 2)
                {
                    foreach (Face face in detectedFaces)
                    {
                        face.MotionObjects = motion.MotionObjects;
                        face.MotionPixels = motion.MotionPixels;

                        if (face.FaceScore > minScore)
                        {
                            foundfaces.Add(face);
                        }
                    }
                }
            }

            capture.Dispose();
            capture = null;
            return foundfaces;
        }
        /********************************************************************************************************************************************/
        /************************************************ CAPTURE STUFF *****************************************************************************/

        public void listCaptureDevices()
        {
            disposeCapture();

            listBox_cameras.Items.Clear();

            for (int i = 0; i < 2; i++)
            {
                capture = new Capture(i);
                if (capture.QueryFrame() != null)
                    listBox_cameras.Items.Add("Camera "+i);
                capture.Dispose();
            }

            if (listBox_cameras.Items.Count > 0)
                listBox_cameras.SelectedIndex = 0;
            
        }
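The probe above only checks indices 0 and 1. A sketch that scans until construction or the first grab fails (an assumption about how missing device indices behave, which varies by capture backend):

        for (int i = 0; ; i++)
        {
            try
            {
                using (Capture probe = new Capture(i))
                {
                    if (probe.QueryFrame() == null)
                        break; // index exists but yields no frames
                    listBox_cameras.Items.Add("Camera " + i);
                }
            }
            catch
            {
                break; // no device at this index
            }
        }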
 public void DetectHuman()
 {
     var capture = new Capture();
     while (true)
     {
         try
         {
             var image = capture.QueryFrame().ToImage<Gray, byte>();
             var detectedFace = DetectFace(image);
             if (detectedFace != null)
             {
                 if (_faceRecognizerTrained)
                 {
                     if (RecognizeFamiliarPerson(detectedFace))
                         continue;
                 }
                 var gender = _genderFaceRecognizer.Predict(detectedFace);
                 if (gender.Label != -1)
                 {
                     GenderRecognized?.Invoke(gender.Label != 0, gender.Distance);
                 }
             }
         }
          catch (ThreadAbortException)
         {
             capture.Dispose();
         }
     }
 }
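Disposing the capture only inside the ThreadAbortException handler leaks it when any other exception escapes the loop. A sketch of a safer shape:

 public void DetectHuman()
 {
     var capture = new Capture();
     try
     {
         while (true)
         {
             // ... grab, detect and recognize as above ...
         }
     }
     finally
     {
         capture.Dispose(); // runs on thread abort and on any other exit path
     }
 }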
        private void ExtractVideoFrames(String Filename)
        {
            Capture _capture = new Capture(Filename);
            try
            {
                TotalVideoFrames = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_COUNT);
                VideoFPS = Math.Round(_capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FPS));
                VideoLength = Convert.ToInt32(Math.Round(TotalVideoFrames / VideoFPS));
                double frameNumber = 0.0;
                IBGFGDetector<Bgr> _detector = new FGDetector<Bgr>(FORGROUND_DETECTOR_TYPE.FGD);

                bool Reading = true;

                while (Reading)
                {
                    _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_POS_FRAMES, frameNumber);
                    _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_BRIGHTNESS, 100);
                    Image<Bgr, Byte> frame = _capture.QueryFrame();
                    if (frame != null)
                    {
                        frame = frame.Resize(Convert.ToDouble(ConfigurationSettings.AppSettings["OutFrameResizeScalefactor"]), Emgu.CV.CvEnum.INTER.CV_INTER_AREA);
                        frame._EqualizeHist();

                        frame.Save(OutImageLocation + "\\" + DateTime.Now.ToString("yyyyMMddHHmmssfff") + ".jpg");
                    }
                    else
                    {
                        Reading = false;
                    }

                    frameNumber += (VideoFPS * TimeDiff);
                    if (frameNumber > TotalVideoFrames)
                        Reading = false;

                }
            }
            catch (Exception)
            {
                throw; // rethrow without resetting the stack trace
            }
            finally
            {
                _capture.Dispose();
            }
        }
Example #27
 static void Run(string file)
 {
     //load image
     Mat image;// = new Mat(file,LoadImageType.Color);
     long detectionTime;
     
     //declare rectangles for detection
     List<Rectangle> breast = new List<Rectangle>();
     
     //disable cuda module
     bool tryUseCuda = false;
     bool tryUseOpenCL = true;
     int itr = 0;
     //capture video file
     Capture capture = new Capture(file);
     image = capture.QueryFrame();
     while (image != null)
     {
         if (itr == 161) break;
         Console.WriteLine(itr++);
         image = capture.QueryFrame();
         detect.detect1(image, "cascade.xml", breast, tryUseCuda, tryUseOpenCL,out detectionTime);
         //put rectangles
         foreach (Rectangle b in breast)
             CvInvoke.Rectangle(image, b, new Bgr(Color.Red).MCvScalar, 2);
         
     }
     capture.Dispose();
     
     
     
     //show image
     /*ImageViewer.Show(image, String.Format(
     "Completed face and eye detection using {0} in {1} milliseconds",
     (tryUseCuda && CudaInvoke.HasCuda) ? "GPU"
     : (tryUseOpenCL && CvInvoke.HaveOpenCLCompatibleGpuDevice) ? "OpenCL"
     : "CPU",
     detectionTime));*/
 }
Example #28
        private void initCamera()
        {
            try
            {
                _capture = new Capture(0);
                _capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, Width);
                _capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, Height);
            }
            catch
            {
                if (_capture != null)
                    _capture.Dispose();
                _capture = null;

            }
        }