Example #1
 public HandDetector()
 {
     videoCapture = CvCapture.FromCamera(0);
     cascade = CvHaarClassifierCascade.FromFile("Hand.Cascade.1.xml");
     if (videoCapture == null)
     {
         throw new Exception("Camera not found.");
     }
 }
Example #2
#if LANG_JP
        /// <summary>
        /// カメラやビデオファイルから一つのフレームを取り出し,それを展開して返す.
        /// この関数は,単純にcvGrabFrame とcvRetrieveFrame をまとめて呼び出しているだけである.
        /// 返された画像は,ユーザが解放したり,変更したりするべきではない.
        /// </summary>
        /// <param name="capture">ビデオキャプチャクラス</param>
        /// <returns>1フレームの画像 (GC禁止フラグが立っている). キャプチャできなかった場合はnull.</returns>
#else
        /// <summary>
        /// Grabs a frame from camera or video file, decompresses and returns it. 
        /// This function is just a combination of cvGrabFrame and cvRetrieveFrame in one call. 
        /// The returned image should not be released or modified by user. 
        /// </summary>
        /// <param name="capture">video capturing structure. </param>
        /// <returns>One frame image (returned with the GC/disposal flag disabled); null if the frame could not be captured.</returns>
#endif
        public static IplImage QueryFrame(CvCapture capture)
        {
            if (capture == null)
                throw new ArgumentNullException("capture");
            
            IntPtr ptr = NativeMethods.cvQueryFrame(capture.CvPtr);
            if (ptr == IntPtr.Zero)
                return null;
            else
                return new IplImage(ptr, false);
        }
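A minimal usage sketch for the QueryFrame wrapper above (not part of the original example): it assumes the same OpenCvSharp types used throughout this page (CvCapture, CvWindow, Cv.WaitKey) and simply polls frames from the first camera until a key is pressed or the stream ends.
        // Hypothetical helper, for illustration only (requires: using OpenCvSharp;).
        public static void PreviewFirstCamera()
        {
            using (CvCapture capture = CvCapture.FromCamera(0))
            using (CvWindow window = new CvWindow("preview"))
            {
                IplImage frame;
                // QueryFrame grabs and decompresses one frame per call; the returned
                // image is owned by the capture, so it is not released here.
                while ((frame = Cv.QueryFrame(capture)) != null)
                {
                    window.Image = frame;
                    if (Cv.WaitKey(30) >= 0) // stop on any key press
                        break;
                }
            }
        }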
Example #3
    // Use this for initialization
    void Start()
    {
        cascade = CvHaarClassifierCascade.FromFile(@"./Assets/haarcascade_frontalface_alt.xml");
        capture = Cv.CreateCameraCapture(0);
        Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, CAPTURE_WIDTH);
        Cv.SetCaptureProperty(capture, CaptureProperty.FrameHeight, CAPTURE_HEIGHT);
        IplImage frame = Cv.QueryFrame(capture);
        Cv.NamedWindow("FaceDetect");

        CvSVM svm = new CvSVM ();
        CvTermCriteria criteria = new CvTermCriteria (CriteriaType.Epsilon, 1000, double.Epsilon);
        CvSVMParams param = new CvSVMParams (CvSVM.C_SVC, CvSVM.RBF, 10.0, 8.0, 1.0, 10.0, 0.5, 0.1, null, criteria);
    }
Example #4
        public MainForm(User user)
        {
            heaer = new UcommHearerOne(RecognitionEvent, FalseRecognitionEvent);
            SetHearerEvent();
            this.user = user;
            this.speaker = new UcommSpeaker();
            InitializeComponent();
            mainCameraCapture = Cv.CreateCameraCapture(0);
            Task.Factory.StartNew( mainCameraLoop );
            lblUserGuide.Parent = pbxMainCamera;
            setFormText();

            InitializeVoiceMemo();
        }
Example #5
	void Start () {
		moveVec = new Vector3(0,0,0);

		cap = CvCapture.FromCamera (0);
		w = new CvWindow("Original");

		IplImage frame = cap.QueryFrame ();

		cols = frame.Width;
		rows = frame.Height;

		myTexture2D = new Texture2D(cols/2, rows/2);
		prvs = new IplImage(cols, rows, BitDepth.U8, 1);
		frame.CvtColor (prvs, ColorConversion.BgrToGray);
	}
Example #6
        /// <summary>
        /// Initialize using the first camera (device index 0)
        /// </summary>
        /// <param name="output">optional log output delegate</param>
        public Camera(logOutput output = null)
        {
            LogOutput = output;

            // Prepare the camera
            cap = Cv.CreateCameraCapture(0);
            Log(cap.CaptureType + ", " + cap.FrameWidth + "x" + cap.FrameHeight + ", " + cap.Mode);

            Cv.SetCaptureProperty(cap, CaptureProperty.FrameWidth, WIDTH);
            Cv.SetCaptureProperty(cap, CaptureProperty.FrameHeight, HEIGHT);

            // Prepare the detector
            cvHCC = Cv.Load<CvHaarClassifierCascade>("haarcascade_profileface.xml");
            stor = Cv.CreateMemStorage(0);
        }
Example #7
 public void InitializeCapture()
 {
     if (captureDisposed)
     {
         try
         {
             capture = new CvCapture(0);
             captureDisposed = false;
         }
         catch (Exception e)
         {
             Console.Out.WriteLine(e.ToString());
         }
     }
 }
Example #8
        public Form1()
        {
            InitializeComponent();
            hearing_mod = new HmfHearingAssist();
            checkBox1.Checked = false;

            this.label1.Parent = this.pictureBox1;
            label1.BackColor = Color.Transparent;

            left_camera_capture = Cv.CreateCameraCapture(0);
            right_camera_capture = Cv.CreateCameraCapture(0);

            left_camera_thread = new Thread(new ThreadStart(left_camera_roop));
            right_camera_thread = new Thread(new ThreadStart(right_camera_roop));
            left_camera_thread.Start();
            right_camera_thread.Start();
        }
Example #9
 public Form1()
 {
     InitializeComponent();
     resultMap.Image = new Bitmap(resultMap.Width, resultMap.Height);
     _robot = InitializableSingleton<INxtRobot>.Instance;
     _map = InitializableSingleton<Map>.Instance;
     _webCam1 = CvCapture.FromCamera(CaptureDevice.DShow, Config.LeftCamera);
     _webCam1.Brightness = 50;
     _webCam1.Contrast = 100;
     _webCam2 = CvCapture.FromCamera(CaptureDevice.DShow, Config.RightCamera);
     _webCam2.Brightness = 50;
     _webCam2.Contrast = 100;
     timer1.Start();
     IsStopped = false;
     Action roboScan = BuildMap;
     Task.Run(roboScan);
 }
Example #10
        public BgSubtractorMOG()
        {
            using (CvCapture capture = new CvCapture(Const.MovieHara)) // specify your movie file
            using (BackgroundSubtractorMOG mog = new BackgroundSubtractorMOG())            
            using (CvWindow windowSrc = new CvWindow("src"))
            using (CvWindow windowDst = new CvWindow("dst")) 
            {
                IplImage imgFrame;
                using (Mat imgFg = new Mat())
                {
                    while ((imgFrame = capture.QueryFrame()) != null)
                    {
                        mog.Run(new Mat(imgFrame, false), imgFg, 0.01);

                        windowSrc.Image = imgFrame;
                        windowDst.Image = imgFg.ToIplImage();
                        Cv.WaitKey(50);
                    }
                }
            }
        }
Example #11
	// Use this for initialization
	void Start () {

		windowCapture = new CvWindow("capture");
		
		try
		{
			capture = new CvCapture(0);
		}
		catch
		{
			Debug.Log("Error: cant open camera.");
		}

	//	capture.GrabFrame ();
	//	frame = capture.RetrieveFrame ();
	//	Cv.CvtColor(frame,prvs,ColorConversion.BgrToGray);
		
		//capture.QueryFrame ();
		//Cv.ShowImage("Hello",)

	}
Example #12
        void Awake()
        {
            _cap = new CvCapture(0);

            _capImage = _cap.QueryFrame();
            _capRgbImage = new IplImage(_capImage.Width, _capImage.Height, BitDepth.U8, 3);
            Debug.Log(string.Format("Capture info : size{0}", _capImage.Size));
            _capGrayImage0 = new IplImage(_capImage.Size, BitDepth.U8, 1);
            _capGrayImage1 = new IplImage(_capImage.Size, BitDepth.U8, 1);
            _pyramidImage0 = new IplImage(new CvSize(_capImage.Width + 8, _capImage.Height/3), BitDepth.U8, 1);
            _pyramidImage1 = new IplImage(new CvSize(_capImage.Width + 8, _capImage.Height/3), BitDepth.U8, 1);
            _eigImage = new IplImage(_capImage.Size, BitDepth.F32, 1);
            _tmpImage = new IplImage(_capImage.Size, BitDepth.F32, 1);
            Cv.ConvertImage(_capImage, _capGrayImage0, 0);
            width = _capImage.Width;
            height = _capImage.Height;

            _opticalFlowWinSize = new CvSize(opticalFlowWinSize, opticalFlowWinSize);
            _opticalFlowCrit = new CvTermCriteria(CriteriaType.Iteration | CriteriaType.Epsilon, ofCritIterations, ofCritError);

            _prevTime = _currTime = UnityEngine.Time.time;
        }
Example #13
        /// <summary>
        /// Basic constructor.
        /// </summary>
        /// <param name="_save">if true, record the captured frames to a video file</param>
        /// <param name="_read">if true, read frames from a video file instead of the camera</param>
        /// <param name="_interval">frame interval in milliseconds (used to derive the output fps)</param>
        public File(bool _save, bool _read, int _interval)
        {
            if (_read == false)
            {
                if (_save == true)
                {
                    double fps = (double)1000 / _interval;

                    this.writer = new CvVideoWriter(
                        this.saveMachineVisionDialog(),
                        FourCC.MJPG,
                        fps,
                        new CvSize(640, 480)
                        );
                }

                this.cap = CvCapture.FromCamera(CaptureDevice.DShow, 0);
            }
            else
            {
                this.cap = CvCapture.FromFile(this.readMachineVisionDialog());
            }
        }
Example #14
        private static double getFps(CvCapture capture)
        {
            while (capture.QueryFrame() == null)
            {
                /* wait for the camera to start delivering frames */
            }

            double counter = 0;
            double seconds = 0;
            var watch = Stopwatch.StartNew();
            while (capture.QueryFrame() != null)
            {
                counter++;
                seconds = watch.ElapsedMilliseconds / (double)1000;
                if (seconds >= 3)
                {
                    watch.Stop();
                    break;
                }
            }
            var fps = counter / seconds;
            return fps;
        }
Example #15
        private void Button3_Click(object sender, EventArgs e)
        {
            if (this.Train1Run || this.Train2Run)
            {
                if (this.Train1Run)
                {
                    this.serialPort1.PortName = this.listBox1.SelectedItems[0].ToString();
                    this.serialPort1.Open();
                }
                if (this.Train2Run)
                {
                    this.serialPort2.PortName = this.listBox1.SelectedItems[1].ToString();
                    this.serialPort2.Open();
                }
                this.timer1.Start();
                CreateDB();
            }
            else
            {
                this.serialPort1.Close();
                this.serialPort2.Close();
                this.timer1.Stop();
                _capture = null;
                this.pictureBox1.Invalidate();
            }

            this.button3.Text = this.Train1Run ? "Port Close" : "Port Open";
            this.button3.BackColor = this.Train1Run ? Color.Red : Color.Black;
        }
Example #16
 // Use this for initialization
 void Start()
 {
     capture = Cv.CreateCameraCapture(0);
     Cv.SetCaptureProperty(capture, CaptureProperty.FrameWidth, CAPTURE_WIDTH);
     Cv.SetCaptureProperty(capture, CaptureProperty.FrameHeight, CAPTURE_HEIGHT);
     captureTexture = new Texture2D(CAPTURE_WIDTH, CAPTURE_HEIGHT, TextureFormat.RGBA32, false);
     isCaptureDisplayEnabled = true;
 }
Example #17
        private void workerDoReadVideo(object sender, DoWorkEventArgs e)
        {
            using (var capture = new CvCapture(@"..\..\Videos\drop.avi"))
            {
                var interval = (int)(1000 / capture.Fps);

                IplImage image;
                while ((image = capture.QueryFrame()) != null &&
                        _worker != null && !_worker.CancellationPending)
                {
                    _worker.ReportProgress(0, image);
                    Thread.Sleep(interval);
                }
            }
        }
Example #18
        private void FormatVideo()
        {
            cap = CvCapture.FromFile(fileName);
            uLastFrame = (UInt16)cap.FrameCount;
            img = cap.QueryFrame();

            CvVideoWriter newVideo = new CvVideoWriter("videoFormated.avi", FourCC.Default, cap.Fps, img.Size);

            setProgressBarMargins(0, uLastFrame);

            for (int i = 1; i < uLastFrame; i++)
            {
                img = cap.QueryFrame();

                if (img == null) break;

                newVideo.WriteFrame(img);
                setProgressBarValue(i);

            }

            newVideo.Dispose();
            setProgressBarValue(0);
        }
Example #19
 public Form1()
 {
     InitializeComponent();
     capture = CvCapture.FromCamera(1);
 }
Example #20
 private void Click_Start(object sender, EventArgs e)
 {
     IPcamera = false;
     CAPTURE = Cv.CreateCameraCapture(0);
     解像度設定(int.Parse(textBox_resX.Text), int.Parse(textBox_resY.Text)); // 解像度設定 = set capture resolution
     CAPTURE.Fps = 1000/int.Parse(textBox_描画周期.Text); // 描画周期 = render interval in ms
     タイマー開始(); // タイマー開始 = start the timer
 }
Example #21
        private void Button3_Click(object sender, EventArgs e)
        {
            this.Train1Run = !this.Train1Run;

            if (this.Train1Run)
            {
                CreateDB();
                this.serialPort1.Open();
                this.timer1.Start();
            }
            else
            {
                this.serialPort1.Close();
                this.timer1.Stop();
                _capture = null;
                this.pictureBox1.Invalidate();
            }

            this.button3.Text = this.Train1Run ? "Port Close" : "Port Open";
            this.button3.BackColor = this.Train1Run ? Color.Red : Color.Black;
        }
Example #22
 private void CheckBox4_Click(object sender, EventArgs e)
 {
     CheckBox cb = (CheckBox)sender;
     _camCapturing = cb.Checked;
     _capture = cb.Checked ? new CvCapture(0) : null;
 }
Example #23
        private void videoAçToolStripMenuItem_Click(object sender, EventArgs e)
        {
            OpenFileDialog opd = new OpenFileDialog();
            if (opd.ShowDialog() == DialogResult.OK && opd.FileName.Length > 3)
            {
                cap = CvCapture.FromFile(opd.FileName);
                img = cap.QueryFrame();
                uLastFrame = (UInt16)cap.FrameCount;

                Size newSize = new Size(img.Width, img.Height);
                ekran.Size = newSize;

                ekran.Image = img.ToBitmap();
                uCurrentFrameNo = 1;
                frameText.Text = uCurrentFrameNo.ToString();
                newVideoOpened = true;
            }
        }