Example 1
	// Use this for initialization
	void Start () {
		Mat src = new Mat(Application.dataPath + "/lena.png", ImreadModes.GrayScale);
		Mat dst = new Mat();
		frame = new Mat ();
		Cv2.Canny(src, dst, 50, 200);
		tex = new Texture2D (dst.Width, dst.Height);
		tex.LoadImage (dst.ToBytes (".png", new int[]{0}));
		go.GetComponent<Renderer> ().material.mainTexture = tex;

		cap = new VideoCapture (1);
	}
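
Start() above opens the camera but never reads a frame from it. A matching per-frame read might look like the following sketch, reusing the cap, frame, tex, and go fields the original class evidently declares:

	// Sketch only: per-frame capture to pair with the Start() above.
	// The PNG round-trip mirrors the texture upload already used there.
	void Update () {
		if (cap != null && cap.IsOpened () && cap.Read (frame) && !frame.Empty ()) {
			tex.LoadImage (frame.ToBytes (".png", new int[]{0}));
		}
	}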
Example 2

        // Use this for initialization
        void Start()
        {
            rgbMat = new Mat ();

            capture = new VideoCapture ();
            capture.open (Utils.getFilePath ("768x576_mjpeg.mjpeg"));

            if (capture.isOpened ()) {
                Debug.Log ("capture.isOpened() true");
            } else {
                Debug.Log ("capture.isOpened() false");
            }

            Debug.Log ("CAP_PROP_FORMAT: " + capture.get (Videoio.CAP_PROP_FORMAT));
            Debug.Log ("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get (Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
            Debug.Log ("CAP_PROP_POS_MSEC: " + capture.get (Videoio.CAP_PROP_POS_MSEC));
            Debug.Log ("CAP_PROP_POS_FRAMES: " + capture.get (Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log ("CAP_PROP_POS_AVI_RATIO: " + capture.get (Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log ("CAP_PROP_FRAME_COUNT: " + capture.get (Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log ("CAP_PROP_FPS: " + capture.get (Videoio.CAP_PROP_FPS));
            Debug.Log ("CAP_PROP_FRAME_WIDTH: " + capture.get (Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log ("CAP_PROP_FRAME_HEIGHT: " + capture.get (Videoio.CAP_PROP_FRAME_HEIGHT));

            // frameWidth/frameHeight are never assigned in this snippet;
            // they presumably come from the capture properties:
            frameWidth = capture.get (Videoio.CAP_PROP_FRAME_WIDTH);
            frameHeight = capture.get (Videoio.CAP_PROP_FRAME_HEIGHT);

            texture = new Texture2D ((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
            gameObject.transform.localScale = new Vector3 ((float)frameWidth, (float)frameHeight, 1);
            float widthScale = (float)Screen.width / (float)frameWidth;
            float heightScale = (float)Screen.height / (float)frameHeight;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
            } else {
                Camera.main.orthographicSize = (float)frameHeight / 2;
            }

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }
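
The Start() above only configures the capture and logs its properties; a minimal per-frame loop to go with it might look like this sketch (assuming the OpenCVForUnity API and the capture, rgbMat, and texture fields declared by the original class):

        // Sketch of a matching Update() loop; field names follow Start() above,
        // everything else is an assumption.
        void Update()
        {
            if (capture.isOpened () && capture.read (rgbMat)) {
                // OpenCV decodes frames as BGR; convert before uploading to the RGBA texture.
                Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                Utils.matToTexture2D (rgbMat, texture);
            }
        }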
Example 3
        private static void VideoCaptureSample()
        {
            var cap = new VideoCapture(0);

            if (!cap.IsOpened())
            {
                Console.WriteLine("Can't use camera.");
                return;
            }

            var frame = new Mat();
            cap.Grab();
            // Internal P/Invoke equivalent of C++ `cap >> frame` (retrieves the grabbed frame)
            NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(cap.CvPtr, frame.CvPtr);
            
            Window.ShowImages(frame);

            using (var window = new Window("window"))
            {
                while (true)
                {
                    cap.Read(frame);
                    window.ShowImage(frame);
                    int key = Cv2.WaitKey(50);
                    if (key == 'b')
                        break;
                }
            }
        }
Example 4
 void OnApplicationQuit()
 {
     if ( video !=null ) {
         video.Dispose();
         video = null;
     }
 }
Example 5
        public void Run()
        {
            var capture = new VideoCapture();
            capture.Set(CaptureProperty.FrameWidth, 640);
            capture.Set(CaptureProperty.FrameHeight, 480);
            capture.Open(-1);
            if (!capture.IsOpened())
                throw new Exception("capture initialization failed");

            var fs = FrameSource.CreateCameraSource(-1);
            var sr = SuperResolution.CreateBTVL1();
            sr.SetInput(fs);

            using (var normalWindow = new Window("normal"))
            using (var srWindow = new Window("super resolution"))
            {
                var normalFrame = new Mat();
                var srFrame = new Mat();
                while (true)
                {
                    capture.Read(normalFrame);
                    sr.NextFrame(srFrame);
                    if (normalFrame.Empty() || srFrame.Empty())
                        break;
                    normalWindow.ShowImage(normalFrame);
                    srWindow.ShowImage(srFrame);
                    Cv2.WaitKey(100);
                }
            }
        }
Example 6
    // Use this for initialization
    void Start()
    {
        // Enumerate the cameras
        // Put the index of the camera you want to use into VideoIndex
        // Enumeration goes through Unity while capture uses OpenCV, but the indices are apparently the same
        var devices = WebCamTexture.devices;
        for ( int i = 0; i < devices.Length; i++ ) {
            print( string.Format( "index {0}:{1}", i, devices[i].name) );
        }

        // Video settings
        video = new VideoCapture( VideoIndex );
        video.Set( CaptureProperty.FrameWidth, Width );
        video.Set( CaptureProperty.FrameHeight, Height );

        print( string.Format("{0},{1}", Width, Height) );

        // Create the face detector
        cascade = new CascadeClassifier( Application.dataPath + @"/haarcascade_frontalface_alt.xml" );

        // Create the texture
        texture = new Texture2D( Width, Height, TextureFormat.RGB24, false );
        renderer.material.mainTexture = texture;

        // Create the camera used for coordinate conversion
        _Camera = GameObject.Find( Camera.name ).camera;
        print( string.Format( "({0},{1})({2},{3})", Screen.width, Screen.height, _Camera.pixelWidth, _Camera.pixelHeight ) );
    }
Example 7
 //---------------------------------------------------------
 // Name    : setDevice
 // Purpose : select and configure the camera device
 // Args    : index / camera device
 // Returns : none
 //---------------------------------------------------------
 public void setDevice(int index)
 {
     // Camera settings
     video = new VideoCapture(index);
     video.Set(CaptureProperty.FrameWidth, GlobalVar.CAMERA_WIDTH);
     video.Set(CaptureProperty.FrameHeight, GlobalVar.CAMERA_HEIGHT);
     video.Set(CaptureProperty.Fps, GlobalVar.CAMERA_FPS);
 }
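
Drivers may silently clamp or ignore requested capture properties, so reading them back shows what was actually applied. A small follow-up sketch using the same video field (Debug.Log assumes a Unity context like the related snippets):

 // Sketch: verify what the driver actually applied, since Set() can be
 // silently ignored for unsupported resolutions or frame rates.
 public void logAppliedSettings()
 {
     Debug.Log(string.Format("applied: {0}x{1} @ {2} fps",
         video.Get(CaptureProperty.FrameWidth),
         video.Get(CaptureProperty.FrameHeight),
         video.Get(CaptureProperty.Fps)));
 }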
Example 8
 public OpenCVMovie OpenMovie(string fileName) {
     try {
         VideoCapture load = new VideoCapture(fileName);
         OpenCVMovie original = new OpenCVMovie(load);
         return original;
     } catch {
         MessageBox.Show("error opening video");
         return null;
     } 
 }
Example 9
        public void Run()
        {
            const string OutVideoFile = "out.avi";

            // Opens MP4 file (ffmpeg is probably needed)
            VideoCapture capture = new VideoCapture(FilePath.Bach);

            // Read movie frames and write them to VideoWriter 
            Size dsize = new Size(640, 480);
            using (VideoWriter writer = new VideoWriter(OutVideoFile, -1, capture.Fps, dsize))
            {
                Console.WriteLine("Converting each movie frames...");
                Mat frame = new Mat();
                while(true)
                {
                    // Read image
                    capture.Read(frame);
                    if(frame.Empty())
                        break;

                    Console.CursorLeft = 0;
                    Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);

                    // grayscale -> canny -> resize
                    Mat gray = new Mat();
                    Mat canny = new Mat();
                    Mat dst = new Mat();
                    Cv2.CvtColor(frame, gray, ColorConversion.BgrToGray);
                    Cv2.Canny(gray, canny, 100, 180);
                    Cv2.Resize(canny, dst, dsize, 0, 0, Interpolation.Linear);
                    // Write mat to VideoWriter
                    writer.Write(dst);
                } 
                Console.WriteLine();
            }

            // Watch result movie
            using (VideoCapture capture2 = new VideoCapture(OutVideoFile))
            using (Window window = new Window("result"))
            {
                int sleepTime = (int)(1000 / capture.Fps);

                Mat frame = new Mat();
                while (true)
                {
                    capture2.Read(frame);
                    if(frame.Empty())
                        break;

                    window.ShowImage(frame);
                    Cv2.WaitKey(sleepTime);
                }
            }
        }
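
A note on the writer above: passing -1 as the fourcc makes Windows pop up a codec selection dialog. To choose the codec programmatically, OpenCvSharp's FourCC values can be passed instead; a sketch, with MJPG picked arbitrarily:

            // Sketch: explicit codec instead of the interactive -1 dialog.
            using (VideoWriter writer = new VideoWriter(OutVideoFile, FourCC.MJPG, capture.Fps, dsize))
            {
                // ... same read/convert/write loop as above ...
            }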
Example 10
	// Use this for initialization
	private void Start () {						
		
		frame = new Mat ();
		cap = new VideoCapture (1);
		tex = new Texture2D (cap.FrameWidth, cap.FrameHeight);			 
		cap.Read (frame);

		dst = new Mat ();
		thresh = new Mat ();

		tex.LoadImage (frame.ToBytes (".png", new int[]{0}));
		go.GetComponent<Renderer> ().material.mainTexture = tex;
	}
Example 11
        public Camera()
        {
            _capture = Capture();
            while (!_capture.IsOpened())
            {
                Logger.Warn("Cannot connect to the camera");
                Thread.Sleep(1000);
                _capture = Capture();
            }

            Logger.Success(String.Format("Connected to the camera ({0}x{1})",
                _capture.FrameWidth, _capture.FrameHeight));
        }
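
Capture() is a factory helper the constructor relies on but the snippet does not show; presumably something along these lines (a sketch; the device index is a guess):

        // Hypothetical Capture() helper assumed by the constructor above:
        // opens the default camera (index 0 is an assumption).
        private static VideoCapture Capture()
        {
            return new VideoCapture(0);
        }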
Example 12
 private void DeleteCapture()
 {
     if (null != video)
     {
         video.Dispose();
         video = null;
     }
     if (null != camera)
     {
         camera.Dispose();
         camera = null;
     }
 }
Example 13
        private void btnVideo_Click(object sender, EventArgs e)
        {
            DeleteCapture();
            var dialog = new OpenFileDialog()
            {
                Title = "Open Movie File",
                Filter = ".avi, .mpg, .wmv|*.avi;*.mpg;*.wmv|all|*.*"
            };
            if (dialog.ShowDialog(this) == DialogResult.OK)
            {

                DeleteCapture();
                video = new VideoCapture(dialog.FileName, this.pictureBox1, VideoCapture.VMR.VMR9);
                setSize(video.Width, video.Height);
                video.Play();
            }
        }
Example 14

	// Use this for initialization
	private void Start () {						
		if (isVid) {
			frame = new Mat ();
			//gray = new Mat();
			cap = new VideoCapture (1);
			tex = new Texture2D (cap.FrameWidth, cap.FrameHeight);
			bkrnd_win_size = 20; //cap.FrameWidth / 5;
			cap.Read (frame);
		} else {
			frame = new Mat(Application.dataPath + "/profile_photo.png", ImreadModes.Color);
			tex = new Texture2D (frame.Width, frame.Height);
			bkrnd_win_size = 20;//frame.Width / 5;
		}
		frame_backproj = new Mat ();
		mask = new Mat ();
		tex.LoadImage (frame.ToBytes (".png", new int[]{0}));
		go.GetComponent<Renderer> ().material.mainTexture = tex;
		//myDetector = new CascadeClassifier ("C:/Users/admin/opencv/build/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml");
		bkrnd_rect = new OpenCvSharp.Rect(1,1,bkrnd_win_size,bkrnd_win_size);

	}
Example 15
    // Use this for initialization
    void Start()
    {
        // Enumerate the cameras
        // Put the index of the camera you want to use into VideoIndex
        // Enumeration goes through Unity while capture uses OpenCV, but the indices are apparently the same
        var devices = WebCamTexture.devices;
        for ( int i = 0; i < devices.Length; i++ ) {
            print( string.Format( "index {0}:{1}", i, devices[i].name ) );
        }

        // Video settings
        video = new VideoCapture( VideoIndex );
        video.Set( CaptureProperty.FrameWidth, Width );
        video.Set( CaptureProperty.FrameHeight, Height );

        print( string.Format( "{0},{1}", Width, Height ) );

        // Create the texture
        texture = new Texture2D( Width, Height, TextureFormat.RGB24, false );
        renderer.material.mainTexture = texture;
    }
Example 16
 public void Run()
 {
     using (var capture = new VideoCapture(FilePath.Bach))
     using (var mog = new BackgroundSubtractorMOG())
     using (var windowSrc = new Window("src"))
     using (var windowDst = new Window("dst"))
     {
         var frame = new Mat();
         var fg = new Mat();
         while (true)
         {
             capture.Read(frame);
             if(frame.Empty())
                 break;
             mog.Run(frame, fg, 0.01);
             
             windowSrc.Image = frame;
             windowDst.Image = fg;
             Cv2.WaitKey(50);
         }
     }
 }
Example 17

        public void Render(IGameContext gameContext, IRenderContext renderContext)
        {
            if (renderContext.GraphicsDevice == null)
            {
                return;
            }

            if (_currentCamera != ActiveCamera)
            {
                if (_videoCapture != null)
                {
                    _videoCapture.Dispose();
                }

                _videoCapture = null;
            }

            _currentCamera = ActiveCamera;

            if (_videoCapture == null && _currentCamera != null)
            {
                _videoCapture = new VideoCapture(
                    renderContext.GraphicsDevice,
                    _currentCamera);
            }

            if (_videoCapture != null)
            {
                VideoCaptureFrame = _videoCapture.Frame;
                VideoCaptureUnlockedRGBA = _videoCapture.UnlockedFrameRGBA;
            }
            else
            {
                VideoCaptureFrame = null;
                VideoCaptureUnlockedRGBA = null;
            }
        }
Example 18
        private static void VideoCaptureSample()
        {
            var cap = new VideoCapture(0);

            if (!cap.IsOpened())
            {
                Console.WriteLine("Can't use camera.");
                return;
            }

            var frame = new Mat();

            using (var window = new Window("window"))
            {
                while (true)
                {
                    cap.Read(frame);
                    window.ShowImage(frame);
                    int key = Cv2.WaitKey(50);
                    if (key == 'b')
                        break;
                }
            }
        }
Example 19
        public void Run()
        {
            // Opens MP4 file (ffmpeg is probably needed)
            VideoCapture capture = new VideoCapture(FilePath.Movie.Bach);

            int sleepTime = (int)Math.Round(1000 / capture.Fps);

            using (Window window = new Window("capture"))
            {
                // Frame image buffer
                Mat image = new Mat();

                // When the movie playback reaches end, Mat.data becomes NULL.
                while (true)
                {
                    capture.Read(image); // same as cvQueryFrame
                    if(image.Empty())
                        break;

                    window.ShowImage(image);
                    Cv2.WaitKey(sleepTime);
                } 
            }
        }
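
For file playback the computed sleepTime is fine, but for live cameras capture.Fps can report 0, which would make the division blow up. A defensive variant (sketch):

            // Sketch: fall back to ~30 fps when the source reports no frame rate.
            double fps = capture.Fps;
            int sleepTime = fps > 0 ? (int)Math.Round(1000 / fps) : 33;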
Example 20
        public IEnumerator MakeVideo(TMPro.TMP_Text progressDisplay, TMPro.TMP_Text statusDisplay)
        {
            // 1. Create video capture
            m_vidCapturer = new VideoCapture(m_vidName);
            // capture validity check
            if (!m_vidCapturer.isOpened())
            {
                m_vidCapturer.release();
                m_vidCapturer = null;
                DanbiUtils.LogErr($"Failed to open the selected video at {m_vidName}");
                yield break;
            }

            // 2. init persistent resources
            receivedFrameMat  = new Mat((int)m_vidCapturer.get(4), (int)m_vidCapturer.get(3), CvType.CV_8UC4); // get(4) = CAP_PROP_FRAME_HEIGHT, get(3) = CAP_PROP_FRAME_WIDTH; CV_8UC4 (RGBA).
            distortedFrameMat = new Mat((int)m_vidCapturer.get(4), (int)m_vidCapturer.get(3), CvType.CV_8UC4); // CV_8UC4 (RGBA).
            texForVideoFrame  = new Texture2D((int)m_vidCapturer.get(3), (int)m_vidCapturer.get(4), TextureFormat.RGBA32, false);

            // 4. calc video frame counts.
            m_currentFrameCount = 0;
            m_maxFrameCount     = (int)m_vidCapturer?.get(DanbiOpencvVideoCapturePropID.frame_count);

            // 5. get a codec for a video writer.
            // MJPG -> error!
            int codec_fourcc = DanbiOpencvVideoCodec_fourcc.get_fourcc_videoCodec(m_videoCodec);

            if (codec_fourcc == -999)
            {
                DanbiUtils.LogErr($"codec is invalid! codec propID -> {codec_fourcc}");
                yield break;
            }

            // 6. create a video writer
            var frameSize = new Size(m_vidCapturer.get(3), m_vidCapturer.get(4)); // width , height

            m_vidWriter = new VideoWriter(m_savedVideoPathAndName, codec_fourcc, m_targetFrameRate, frameSize, true);

            // while (m_currentFrameCount < m_dbgMaxFrameCount)
            while (m_currentFrameCount < m_maxFrameCount - 1)
            {
                if (m_isSaving)
                {
                    break;
                }

                // read the next frame into 'receivedFrameMat'.
                if (!m_vidCapturer.read(receivedFrameMat))
                {
                    DanbiUtils.LogErr($"Failed to read the current video frame! <No next frame>");
                    break;
                }

                // testRT = new Mat((int)receivedFrameMat.get(4), (int)receivedFrameMat.get(3), CvType.CV_8UC4);
                // OpenCVForUnity.ImgprocModule.Imgproc.cvtColor(receivedFrameMat, testRT, OpenCVForUnity.ImgprocModule.Imgproc.COLOR_RGBA)

                if (receivedFrameMat.empty())
                {
                    DanbiUtils.LogErr("Frame failed to receive the captured frame from the video!");
                    break;
                }

                Utils.matToTexture2D(receivedFrameMat, texForVideoFrame);

                yield return(StartCoroutine(DistortCurrentFrame(texForVideoFrame)));

                Utils.texture2DToMat(texForVideoFrame, distortedFrameMat);

                if (distortedFrameMat.empty())
                {
                    DanbiUtils.LogErr("Frame failed to receive the distorted result!");
                    break;
                }

                // write the distorted frame into the video writer
                m_vidWriter.write(distortedFrameMat);

                // TODO: update the text with DanbiStatusDisplayHelper
                // progressDisplayText.text = $"Start to warp" +
                //   "(500 / 25510) " +
                //   "(1.96001%)";
                // TODO: update the text with DanbiStatusDisplayHelper
                // statusDisplayText.text = "Image generating succeed!";

                ++m_currentFrameCount;
            }

            // dispose resources.
            m_vidCapturer.release();
            m_vidWriter.release();
            receivedFrameMat.release();
            distortedFrameMat.release();
            texForVideoFrame = null;

            // reset flags
            DanbiManager.instance.renderFinished = false;
            m_isSaving = false;

            Application.runInBackground = false;

            // wait for the saved file to appear
            yield return(new WaitUntil(() => new System.IO.FileInfo(m_savedVideoPathAndName).Exists));

            System.Diagnostics.Process.Start(@"" + m_savedVideoPath);
        }
Example 21
        private async void btStart_Click(object sender, EventArgs e)
        {
            mmError.Text = string.Empty;

            if (rbSTreamTypeFile.Checked)
            {
                _fileStream = new FileStream(edFilename.Text, FileMode.Open);
                _stream     = new ManagedIStream(_fileStream);

                // specifying settings
                MediaPlayer1.Source_Stream      = _stream;
                MediaPlayer1.Source_Stream_Size = _fileStream.Length;
            }
            else
            {
                _memorySource = File.ReadAllBytes(edFilename.Text);
                _memoryStream = new MemoryStream(_memorySource);
                _stream       = new ManagedIStream(_memoryStream);

                // specifying settings
                MediaPlayer1.Source_Stream      = _stream;
                MediaPlayer1.Source_Stream_Size = _memoryStream.Length;
            }

            // video and audio are present in the file; tune these settings to play audio-only files or video files without audio
            if (rbVideoWithAudio.Checked)
            {
                MediaPlayer1.Source_Stream_VideoPresent = true;
                MediaPlayer1.Source_Stream_AudioPresent = true;
            }
            else if (rbVideoWithoutAudio.Checked)
            {
                MediaPlayer1.Source_Stream_VideoPresent = true;
                MediaPlayer1.Source_Stream_AudioPresent = false;
            }
            else
            {
                MediaPlayer1.Source_Stream_VideoPresent = false;
                MediaPlayer1.Source_Stream_AudioPresent = true;
            }

            MediaPlayer1.Source_Mode = VFMediaPlayerSource.Memory_DS;

            MediaPlayer1.Audio_OutputDevice = "Default DirectSound Device";

            if (VideoCapture.Filter_Supported_EVR())
            {
                MediaPlayer1.Video_Renderer.Video_Renderer = VFVideoRenderer.EVR;
            }
            else if (VideoCapture.Filter_Supported_VMR9())
            {
                MediaPlayer1.Video_Renderer.Video_Renderer = VFVideoRenderer.VMR9;
            }
            else
            {
                MediaPlayer1.Video_Renderer.Video_Renderer = VFVideoRenderer.VideoRenderer;
            }

            MediaPlayer1.Debug_Mode = cbDebugMode.Checked;
            await MediaPlayer1.PlayAsync();

            tbTimeline.Maximum = (int)MediaPlayer1.Duration_Time().TotalSeconds;
            timer1.Enabled     = true;
        }
Example 22
 void Start()
 {
     capture = new VideoCapture(0);
     capture.ImageGrabbed += HandleGrab;
     classifier            = new CascadeClassifier("Assets/Resources/haarcascade_frontalface_default.xml");
 }
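
The HandleGrab handler wired to ImageGrabbed is not shown; with Emgu.CV it might look like the sketch below. Note that Emgu only raises ImageGrabbed once capture.Start() has been called, and the grabbed field here is an assumption:

 // Sketch of the missing handler; Emgu.CV API, field names assumed.
 private Mat grabbed = new Mat();

 private void HandleGrab(object sender, EventArgs e)
 {
     capture.Retrieve(grabbed);   // copy the frame that was just grabbed
     using (var gray = new Mat())
     {
         CvInvoke.CvtColor(grabbed, gray, ColorConversion.Bgr2Gray);
         foreach (var face in classifier.DetectMultiScale(gray, 1.1, 4))
             CvInvoke.Rectangle(grabbed, face, new MCvScalar(0, 0, 255), 2);
     }
 }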
Example 23
 public bool CheckGetPokemon()
 {
     Log($"Dialog.CheckGetPokemon()");
     return(VideoCapture.Match(410, 970, ImageRes.Get("dialog_getpokemon"), DefaultImageCap, VideoCapture.LineSampler(3)));
 }
Example 24
    // Use this for initialization
    void Start()
    {
        face_cascade = new CascadeClassifier ("haarcascade_frontalface_alt.xml");
        eye_cascade = new CascadeClassifier ("haarcascade_eye.xml");

        int i;
        for (i=0; i<10; i++) {
            //cap = VideoCapture.FromCamera (i);
            cap = new VideoCapture(i);
            // the constructor never returns null; test IsOpened() to find a working camera
            if (cap.IsOpened ())
                break;
            cap.Release ();
            cap = null;
        }
        /*
        Application.RequestUserAuthorization(UserAuthorization.WebCam);
        if (Application.HasUserAuthorization (UserAuthorization.WebCam)) {
            WebCamDevice[] devices = WebCamTexture.devices;
            string deviceName = devices [0].name;
            cameraTexture = new WebCamTexture (deviceName);
        }*/

        if (cap != null) {
            frameSize = new Size (cap.FrameWidth, cap.FrameHeight);
            tex = new Texture2D (cap.FrameWidth, cap.FrameHeight);
        }
        else if (cameraTexture != null) {
            frameSize = new Size (cameraTexture.width, cameraTexture.height);
            tex = new Texture2D (cameraTexture.width, cameraTexture.height);
        }
        GameObject.Find ("CamQuad").GetComponent<Renderer> ().material.mainTexture = tex;
        GameObject.Find ("CamQuad").GetComponent<Renderer> ().material.SetTextureScale ("_MainTex", new Vector2 (1.0f, -1.0f));

        // cameraTexture is only assigned in the commented-out WebCamTexture path above
        if (cameraTexture != null)
            cameraTexture.Play ();

        lastx = new float[10];
        lasty = new float[10];
    }
Example 25

        private void MainWindow_Closed(object sender, EventArgs e)
        {
            if (this.VideoViewer != null && this.VideoViewer.IsLoaded)
            {
                this.VideoViewer.Close();
            }

            if (this.MyVideoCapture != null)
            {
                this.MyVideoCapture.Dispose();
                this.MyVideoCapture = null;
            }
        }
Example 26

        public Mat RunPlugin(VideoCapture src)
        {
            capture = src;

            return(DetectHandSkin());
        }
Example 27
        public override int Run()
        {
            int device = 0;

            var argument = new StringList {
                "./"
            };
            FaceModelParameters det_parameters = new FaceModelParameters(argument);

            //vector<string> files, depth_directories, output_video_files, out_dummy;
            StringList files = new StringList(), output_video_files = new StringList(), out_dummy = new StringList();
            bool       u;
            string     output_codec;

            LandmarkDetector.get_video_input_output_params(files, out_dummy, output_video_files, out u, out output_codec, argument);

            CLNF clnf_model = new CLNF(det_parameters.model_location);

            float fx = 0, fy = 0, cx = 0, cy = 0;

            LandmarkDetector.get_camera_params(out device, out fx, out fy, out cx, out cy, argument);

            // If cx (optical axis centre) is undefined will use the image size/2 as an estimate
            bool cx_undefined = false;
            bool fx_undefined = false;

            if (cx == 0 || cy == 0)
            {
                cx_undefined = true;
            }
            if (fx == 0 || fy == 0)
            {
                fx_undefined = true;
            }

            //// Do some grabbing
            INFO_STREAM("Attempting to capture from device: " + device);
            using (VideoCapture video_capture = new VideoCapture(device))
            {
                using (Mat dummy = new Mat())
                    video_capture.Read(dummy);

                if (!video_capture.IsOpened())
                {
                    FATAL_STREAM("Failed to open video source");
                    return(1);
                }
                else
                {
                    INFO_STREAM("Device or file opened");
                }

                int frame_count    = 0;
                Mat captured_image = new Mat();
                video_capture.Read(captured_image);
                Size = new Size(captured_image.Width / SizeFactor, captured_image.Height / SizeFactor);
                using (var resized_image = captured_image.Resize(Size))
                {
                    // If optical centers are not defined just use center of image
                    if (cx_undefined)
                    {
                        cx = resized_image.Cols / 2.0f;
                        cy = resized_image.Rows / 2.0f;
                    }
                    // Use a rough guess-timate of focal length
                    if (fx_undefined)
                    {
                        fx = (float)(500 * (resized_image.Cols / 640.0));
                        fy = (float)(500 * (resized_image.Rows / 480.0));

                        fx = (float)((fx + fy) / 2.0);
                        fy = fx;
                    }
                }

                // Use for timestamping if using a webcam
                long t_initial = Cv2.GetTickCount();

                INFO_STREAM("Starting tracking");
                while (video_capture.Read(captured_image))
                {
                    using (var resized_image = captured_image.Resize(Size))
                    {
                        // Reading the images
                        MatOfByte grayscale_image = new MatOfByte();

                        if (resized_image.Channels() == 3)
                        {
                            Cv2.CvtColor(resized_image, grayscale_image, ColorConversionCodes.BGR2GRAY);
                        }
                        else
                        {
                            grayscale_image = (MatOfByte)resized_image.Clone();
                        }

                        // The actual facial landmark detection / tracking
                        bool detection_success = LandmarkDetector.DetectLandmarksInVideo(new SWIGTYPE_p_cv__Mat_T_uchar_t(grayscale_image.CvPtr), new SWIGTYPE_p_CLNF(CLNF.getCPtr(clnf_model)), new SWIGTYPE_p_FaceModelParameters(FaceModelParameters.getCPtr(det_parameters)));

                        // Visualising the results
                        // Drawing the facial landmarks on the face and the bounding box around it if tracking is successful and initialised
                        double detection_certainty = clnf_model.detection_certainty;

                        visualise_tracking(resized_image, ref clnf_model, ref det_parameters, frame_count, fx, fy, cx, cy);

                        // detect key presses
                        char character_press = (char)Cv2.WaitKey(15);
                        switch (character_press)
                        {
                        case 'r':
                            clnf_model.Reset();
                            break;

                        case 'q':
                            return(0);
                        }

                        // Update the frame count
                        frame_count++;

                        grayscale_image.Dispose();
                        grayscale_image = null;
                    }
                }
            }

            return(0);
        }
Example 28
        static void FindLaneInTheVideo(string path)
        {
            VideoCapture capture      = new VideoCapture(path);
            Mat          workAreaMask = CreateMask();

            using (Window win1 = new Window("test1"))
            {
                Mat image = new Mat();
                //  We will save previous results here
                List <List <Sensor> > oldResultGroups = null;
                int[] countTaked = new int[2] {
                    0, 0
                };
                while (true)
                {
                    DateTime dt1 = DateTime.Now;
                    capture.Read(image);
                    if (image.Empty())
                    {
                        break;
                    }

                    if (capture.PosFrames % 2 != 0)
                    {
                        continue;
                    }

                    //  Get the work area
                    Mat image_s = image.SubMat(Camera.vert_frame[0], Camera.vert_frame[1],
                                               Camera.hor_frame[0], Camera.hor_frame[1]);
                    Mat workArea = new Mat();
                    image_s.CopyTo(workArea, workAreaMask);

                    //  Get HSV, gray and canny
                    Mat hsv_image = workArea.CvtColor(ColorConversionCodes.RGB2HSV);
                    Mat canny1    = hsv_image.Canny(40, 60);
                    Mat gray      = workArea.CvtColor(ColorConversionCodes.BGR2GRAY);
                    Mat canny2    = gray.Canny(40, 60);
                    Mat canny     = new Mat();
                    Cv2.BitwiseAnd(canny1, canny2, canny);

                    //  Get, filter and draw contours
                    Mat hsv_contoures = new Mat();
                    hsv_image.CopyTo(hsv_contoures);
                    var contoures = FindContoures(canny);
                    hsv_contoures.DrawContours(contoures, -1, Scalar.Red);

                    //  Get indexers
                    MatOfByte3 hsv_cont_ind     = new MatOfByte3(hsv_contoures);
                    MatOfByte3 hsv_ind          = new MatOfByte3(hsv_image);
                    var        hsv_cont_indexer = hsv_cont_ind.GetIndexer();
                    var        hsv_indexer      = hsv_ind.GetIndexer();

                    //  Run the steps of the algorithm
                    List <Sensor>         sensors                = GetSensors(hsv_contoures, hsv_cont_indexer);
                    List <Sensor>         filteredByContours     = FilterByContours(sensors, hsv_cont_indexer);
                    List <Sensor>         filteredByColors       = FilterByColorAndChangeColor(filteredByContours, hsv_indexer);
                    List <Sensor>         filteredByNearSensors  = FilterByNearSensors(filteredByColors);
                    List <List <Sensor> > groupedByAngle         = GroupByAngle(filteredByNearSensors).Where(g => g.Count > 2).ToList();
                    List <List <Sensor> > groupedByDistance      = GroupByDistance(groupedByAngle).Where(g => g.Count > 2).ToList();
                    List <List <Sensor> > groupedWithoudCovering = DeleteCovering(groupedByDistance);
                    List <List <Sensor> > unionGroups            = UnionGroups(groupedWithoudCovering).Where(g => g.Count > 2).ToList();
                    List <List <Sensor> > resultGroups           = SelectGroups(unionGroups, oldResultGroups, ref countTaked);
                    image.SaveImage("image.png");
                    //  Draw the result
                    foreach (var group in resultGroups)
                    {
                        if (group != null)
                        {
                            foreach (var line in GetLinesForGroup(group))
                            {
                                image.Line(line.x1 + Camera.hor_frame[0], line.y1 + Camera.vert_frame[0],
                                           line.x2 + Camera.hor_frame[0], line.y2 + Camera.vert_frame[0], Scalar.Blue, 5);
                            }
                        }
                    }
                    image.SaveImage("res.png");
                    Mat imageForDisplay = image.Resize(new Size(0, 0), 0.5, 0.5);
                    win1.ShowImage(imageForDisplay);
                    oldResultGroups = resultGroups;

                    DateTime dt2 = DateTime.Now;
                    Console.WriteLine("{0}\tms", (dt2 - dt1).TotalMilliseconds);

                    int key = Cv2.WaitKey(0);
                    if (key == 27)
                    {
                        break;            //escape
                    }
                    //  Free resources
                    image_s.Release();
                    workArea.Release();
                    hsv_ind.Release();
                    hsv_cont_ind.Release();
                    gray.Release();
                    canny1.Release();
                    canny2.Release();
                    canny.Release();
                    hsv_image.Release();
                    hsv_contoures.Release();
                }
            }
        }
Example 29
        static void Main(string[] args)
        {
            //Display the acquired image and its difference (edge) image
            #region
            //Mat src = new Mat(@"C:\Users\tyani\Downloads\lenna.png", ImreadModes.Grayscale);
            //// Mat src = Cv2.ImRead("lenna.png", ImreadModes.Grayscale);
            //Mat dst = new Mat();

            //Cv2.Canny(src, dst, 50, 200);
            //using (new Window("src image", src))
            //using (new Window("dst image", dst))
            //{
            //    Cv2.WaitKey();
            //}
            #endregion

            //Detect faces in the captured video and draw a circle at each position
            #region
            //using (var win = new Window("capture"))
            //using (var video = new VideoCapture(0))
            //{
            //    //Destination matrix
            //    var frame = new Mat();
            //    //Classifier
            //    var haarcascade = new CascadeClassifier(@"C:\Users\tyani\Downloads\opencv-master\data\haarcascades\haarcascade_frontalface_default.xml");
            //    //Grayscale
            //    var gray = new Mat();

            //    while (true)
            //    {
            //        video.Read(frame);
            //        //Assign the grayscale conversion of frame to gray
            //        Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
            //        //Detection
            //        var faces = haarcascade.DetectMultiScale(gray);
            //        //Draw a circle at each detected location
            //        foreach (OpenCvSharp.Rect face in faces)
            //        {
            //            var center = new Point
            //            {
            //                X = (int)(face.X + face.Width * 0.5),
            //                Y = (int)(face.Y + face.Height * 0.5)
            //            };
            //            var axes = new Size
            //            {
            //                Width = (int)(face.Width * 0.5),
            //                Height = (int)(face.Height * 0.5)
            //            };
            //            Cv2.Ellipse(frame, center, axes, 0, 0, 360, Scalar.Red, 4);
            //            Console.WriteLine("Found!");
            //        }

            //        win.ShowImage(frame);
            //        if (Cv2.WaitKey(30) >= 0) { break; }
            //    }
            //}
            #endregion

            //Perform camera calibration
            #region

            ////Number of chessboard corner sets successfully read
            //int imgInd = 0;

            ////Definition of the chessboard to read
            //const int BOARD_W = 7;
            //const int BOARD_H = 7;
            //Size BOARD_SIZE = new Size(BOARD_W, BOARD_H);
            //const int SCALE = 26;

            ////List of coordinates where low-precision corners were detected
            //var imageCorners = new Mat<Point2f>();
            ////Array of refined (high-precision) corner coordinates
            //Point2f[] imageCorners2;
            ////List of corner coordinates detected in frame
            //var imagePoints = new List<Mat<Point2f>>();

            ////Frame image for each capture
            //var frame = new Mat();
            ////Grayscale of the frame image
            //var gray = new Mat();

            ////Recognized chessboard images
            //var chessboard = new List<Mat>();

            ////Termination criteria for refining the corner coordinates
            //var criteria = new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 30, 0.001);

            ////Create the windows
            //using (var capWindow = new Window("capture"))
            //using (var chessWindow = new Window("DrawChessboardCorner"))

            ////Assign the video captured from the camera to video
            //using (var video = new VideoCapture(0))
            //{
            //    while (true)
            //    {
            //        video.Read(frame);

            //        var key = Cv2.WaitKey(1);

            //        if (key == 'c')
            //        {
            //            Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
            //            //Assign true to ret if the chessboard corners were detected
            //            var ret = Cv2.FindChessboardCorners(frame, BOARD_SIZE, imageCorners);
            //            //Console.WriteLine(imageCorners.Get<Point2f>(0));

            //            if (ret)
            //            {
            //                Console.WriteLine("find chess board corners.");
            //                chessboard.Add(frame);
            //                //Assign the refined corner coordinates to imageCorners2
            //                imageCorners2 = Cv2.CornerSubPix(gray, imageCorners, new Size(9, 9), new Size(-1, -1), criteria);
            //                //Console.WriteLine(imageCorners2[0]);

            //                Cv2.DrawChessboardCorners(frame, BOARD_SIZE, imageCorners2, ret);

            //                imageCorners.SetArray<Point2f>(imageCorners2);
            //                //Console.WriteLine(imageCorners.Get<Point2f>(0));
            //                //Console.WriteLine(imageCorners.Size());

            //                imagePoints.Add(imageCorners);
            //                chessWindow.ShowImage(frame);
            //                Cv2.WaitKey(500);

            //                imgInd++;
            //            }
            //        }
            //        else if (key == 'q')
            //        {
            //            break;
            //        }
            //        else
            //        {
            //            Cv2.PutText(frame, "Number of caputure: " + imgInd.ToString(), new Point(30, 20), HersheyFonts.HersheyPlain, 1, new Scalar(0, 255, 0));
            //            Cv2.PutText(frame, "c: Capture the image", new Point(30, 40), HersheyFonts.HersheyPlain, 1, new Scalar(0, 255, 0));
            //            Cv2.PutText(frame, "q: Finish capturing and calcurate the camera matrix and distortion", new Point(30, 60), HersheyFonts.HersheyPlain, 1, new Scalar(0, 255, 0));

            //            capWindow.ShowImage(frame);
            //        }
            //    }

            //    //Not quite sure what this part does
            //    var objectPoints = new List<Mat<Point3f>>();
            //    var objectConrers = new Mat<Point3f>();
            //    for (int j = 0; j < BOARD_H; j++)
            //    {
            //        for (int i = 0; i < BOARD_W; i++)
            //        {
            //            objectConrers.PushBack(new Point3f(i * SCALE, j * SCALE, 0f));
            //        }
            //    }
            //    for (int i = 0; i < imgInd; i++)
            //        objectPoints.Add(objectConrers);

            //    var cameraMatrix = new Mat();
            //    var distCoeffs = new Mat();
            //    Mat[] rvecs;
            //    Mat[] tvecs;

            //    var rms = Cv2.CalibrateCamera(objectPoints, imagePoints, frame.Size(), cameraMatrix, distCoeffs, out rvecs, out tvecs);

            //    Console.WriteLine("Re-projection Error(unit: pixel) : " + rms);
            //    Console.WriteLine("cameraMatrix(unit: pixel) : " + cameraMatrix);
            //    Console.WriteLine("distCoeffs : " + distCoeffs);

            //    var dst = new Mat();
            //    Cv2.Undistort(chessboard[0], dst, cameraMatrix, distCoeffs);
            //    using (new Window("補正画像", WindowMode.AutoSize, dst))
            //    {
            //        while (true)
            //            if (Cv2.WaitKey() == 'q')
            //                break;
            //    }
            //}
            #endregion

            //SIFT matching
            #region
            //var leftFrame = new Mat();
            //var rightFrame = new Mat();
            //var view = new Mat();

            //var leftGray = new Mat();
            //var rightGray = new Mat();

            //var sift = SIFT.Create();
            //KeyPoint[] keypoints1, keypoints2;
            //var descriptors1 = new Mat();
            //var descriptors2 = new Mat();

            //using (var leftCapWindow = new Window("left capture"))
            //using (var rightCapWindow = new Window("right capture"))
            //using (var leftVideo = new VideoCapture(0))
            //using (var rightVideo = new VideoCapture(1))
            //{
            //    while (true)
            //    {
            //        leftVideo.Read(leftFrame);
            //        rightVideo.Read(rightFrame);

            //        Cv2.PutText(leftFrame, "d: detect and compute SIFT", new Point(30, 40), HersheyFonts.HersheyPlain, 1, new Scalar(0, 255, 0));
            //        Cv2.PutText(rightFrame, "d: detect and compute SIFT", new Point(30, 40), HersheyFonts.HersheyPlain, 1, new Scalar(0, 255, 0));

            //        leftCapWindow.ShowImage(leftFrame);
            //        rightCapWindow.ShowImage(rightFrame);

            //        var key = Cv2.WaitKey(1);

            //        if (key == 'd')
            //        {
            //            Cv2.CvtColor(leftFrame, leftGray, ColorConversionCodes.BGR2GRAY);
            //            Cv2.CvtColor(rightFrame, rightGray, ColorConversionCodes.BGR2GRAY);

            //            sift.DetectAndCompute(leftGray, null, out keypoints1, descriptors1);
            //            sift.DetectAndCompute(rightGray, null, out keypoints2, descriptors2);

            //            BFMatcher matcher = new BFMatcher(NormTypes.L2, false);
            //            DMatch[] matches = matcher.Match(descriptors1, descriptors2);

            //            Cv2.DrawMatches(leftGray, keypoints1, rightGray, keypoints2, matches, view);

            //            using (new Window("SIFT matching", WindowMode.AutoSize, view))
            //            {
            //                Cv2.WaitKey();
            //            }
            //        }
            //    }
            //}
            #endregion

            //Generate a disparity map
            #region
            //var leftFrame = new Mat();
            //var rightFrame = new Mat();
            //var disparity = new Mat();

            //var leftGray = new Mat();
            //var rightGray = new Mat();

            //var sift = SIFT.Create();
            //KeyPoint[] keypoints1, keypoints2;
            //var descriptors1 = new Mat();
            //var descriptors2 = new Mat();

            //using (var leftCapWindow = new Window("left capture"))
            //using (var rightCapWindow = new Window("right capture"))
            //using (var leftVideo = new VideoCapture(1))
            //using (var rightVideo = new VideoCapture(0))
            //{
            //    while (true)
            //    {
            //        leftVideo.Read(leftFrame);
            //        rightVideo.Read(rightFrame);

            //        leftCapWindow.ShowImage(leftFrame);
            //        rightCapWindow.ShowImage(rightFrame);

            //        Cv2.CvtColor(leftFrame, leftGray, ColorConversionCodes.BGR2GRAY);
            //        Cv2.CvtColor(rightFrame, rightGray, ColorConversionCodes.BGR2GRAY);

            //        var stereo = StereoSGBM.Create(0, 16*10, 11, mode:StereoSGBMMode.HH);
            //        stereo.Compute(leftGray, rightGray, disparity);
            //        disparity.Normalize(alpha: 0, beta: 255, normType: NormTypes.MinMax, dtype: MatType.CV_8U);

            //        using (new Window("disparity map", WindowMode.AutoSize, disparity))
            //        {
            //            var key = Cv2.WaitKey(1);
            //            if (key == 'q')
            //                break;
            //        }
            //    }
            //}
            #endregion

            //The simplest StereoMatching tutorial
            #region
            //var imgL = new Mat(@"C:\Users\tyani\Downloads\scene1.row3.col1.png", ImreadModes.Grayscale);
            //var imgR = new Mat(@"C:\Users\tyani\Downloads\scene1.row3.col3.png", ImreadModes.Grayscale);
            //var disparity = new Mat();

            //var stereo = StereoBM.Create();
            //stereo.Compute(imgL, imgR, disparity);


            //using(new Window(disparity))
            //{
            //    Cv2.WaitKey(0);
            //}
            #endregion

            //AR markers
            #region

            using (var video0 = new VideoCapture(0))
                using (var video1 = new VideoCapture(1))
                    using (var window0 = new Window("capture0"))
                        using (var window1 = new Window("capture1"))
                        {
                            var dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict6X6_250);
                            var parameters = DetectorParameters.Create();

                            var frames = new List <Mat> {
                                new Mat(), new Mat()
                            };
                            var videos  = new List <VideoCapture>();
                            var windows = new List <Window>();

                            Point2f[][] corners;
                            int[]       ids;
                            int[]       previousIds;
                            Point2f[][] rejectedImgPoints;

                            videos.Add(video0);
                            videos.Add(video1);
                            windows.Add(window0);
                            windows.Add(window1);

                            var wasFoundList = new List <bool> {
                                false, false
                            };
                            var isTouchedList = new List <bool> {
                                false, false
                            };
                            var wasTouched = false;

                            while (true)
                            {
                                for (int i = 0; i < 2; i++)
                                {
                                    videos[i].Read(frames[i]);

                                    CvAruco.DetectMarkers(frames[i], dictionary, out corners, out ids, parameters, out rejectedImgPoints);

                                    isTouchedList[i] = wasFoundList[i] && !(ids.Length > 0);

                                    if (ids.Length > 0)
                                    {
                                        wasFoundList[i] = true;
                                        CvAruco.DrawDetectedMarkers(frames[i], corners, ids);
                                    }
                                    else
                                    {
                                        wasFoundList[i]  = false;
                                        isTouchedList[i] = true;
                                    }

                                    windows[i].ShowImage(frames[i]);
                                }

                                if (!isTouchedList.Contains(false))
                                {
                                    if (!wasTouched)
                                    {
                                        Console.WriteLine("Hello world!");
                                        for (int i = 0; i < isTouchedList.Count; i++)
                                        {
                                            isTouchedList[i] = false;
                                        }
                                    }
                                    wasTouched = true;
                                }
                                else
                                {
                                    wasTouched = false;
                                }

                                var key = Cv2.WaitKey(1);
                                if (key == 'q')
                                {
                                    break;
                                }
                            }
                        }
            #endregion

            #region
            //var numCamera = 1;

            //var dictionary = CvAruco.GetPredefinedDictionary(PredefinedDictionaryName.Dict7X7_50);
            //var parameters = DetectorParameters.Create();

            //var videoCaps = new List<VideoCapture>(numCamera);
            //var windows = new List<Window>(numCamera);

            //var capFrames = new List<Mat>(numCamera);
            ////var markerCornersList = new List<Point2f[][]>(numCamera);
            ////var markerIdsList = new List<int[]>(numCamera);
            ////var rejectedImgPointsList = new List<Point2f[][]>(numCamera);

            //var isFoundList = new List<bool>(numCamera);
            //var isTouchedList = new List<bool>(numCamera);

            //try
            //{
            //    for (int i = 0; i < numCamera; i++)
            //    {
            //        videoCaps.Add(new VideoCapture(i));
            //        windows.Add(new Window("VideoCapture" + i.ToString()));
            //    }

            //    while (true)
            //    {
            //        for (int i = 0; i < numCamera; i++)
            //        {
            //            var capFrame = new Mat();
            //            Point2f[][] markerCorners;
            //            int[] markerIds;
            //            Point2f[][] rejectedImgPoints;

            //            videoCaps[i].Read(capFrame);
            //            CvAruco.DetectMarkers(capFrame, dictionary, out markerCorners, out markerIds, parameters, out rejectedImgPoints);

            //            if (!isFoundList[i] && markerIds.Length > 0)
            //            {
            //                isFoundList[i] = true;
            //            }

            //            if (isFoundList[i] && !(markerIds[i] > 0))
            //            {
            //                isTouchedList[i] = true;
            //            }

            //            if (markerIds.Length > 0)
            //            {
            //                CvAruco.DrawDetectedMarkers(capFrame, markerCorners, markerIds);
            //            }

            //            windows[i].ShowImage(capFrame);
            //        }

            //        if (!isTouchedList.Contains(false))
            //        {
            //            Console.WriteLine("Hello world!");
            //        }

            //        var key = Cv2.WaitKey(1);
            //        if (key == 'q')
            //        {
            //            break;
            //        }
            //    }
            //}
            //catch
            //{
            //    Console.WriteLine("例外:カメラが取得できませんでした。");
            //}
            //finally
            //{
            //    for (int i = 0; i < numCamera; i++)
            //    {
            //        if (videoCaps[i] != null)
            //            videoCaps[i].Dispose();

            //        if (windows[i] != null)
            //        {
            //            windows[i].Dispose();
            //        }
            //    }
            //}

            //test

            #endregion
        }
Example 30
        public bool EnumerateCameras(List <int> camIdx)
        {
            camIdx.Clear();

            // list of all CAP drivers (see highgui_c.h)
            drivers = new List <CapDriver>();

            //  drivers.Add(new CapDriver { enumValue = CaptureDevice., "CV_CAP_MIL", "MIL proprietary drivers" });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.VFW, enumName = "VFW", comment = "platform native"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.V4L, enumName = "V4L", comment = "platform native"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Firewire, enumName = "FireWire", comment = "IEEE 1394 drivers"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Fireware, enumName = "Fireware", comment = "IEEE 1394 drivers"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Qt, enumName = "Qt", comment = "Quicktime"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Unicap, enumName = "Unicap", comment = "Unicap drivers"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.DShow, enumName = "DSHOW", comment = "DirectShow (via videoInput)"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.PVAPI, enumName = "PVAPI", comment = "PvAPI, Prosilica GigE SDK"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.OpenNI, enumName = "OpenNI", comment = "OpenNI(for Kinect) "
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.OpenNI_ASUS, enumName = "OpenNI_ASUS", comment = "OpenNI(for Asus Xtion) "
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Android, enumName = "Android", comment = "Android"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.XIAPI, enumName = "XIAPI", comment = "XIMEA Camera API"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.AVFoundation, enumName = "AVFoundation", comment = "AVFoundation framework for iOS (OS X Lion will have the same API)"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Giganetix, enumName = "Giganetix", comment = "Smartek Giganetix GigEVisionSDK"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.MSMF, enumName = "MSMF", comment = "Microsoft Media Foundation (via videoInput)"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.WinRT, enumName = "WinRT", comment = "Microsoft Windows Runtime using Media Foundation"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.IntelPERC, enumName = "IntelPERC", comment = "Intel Perceptual Computing SDK"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.OpenNI2, enumName = "OpenNI2", comment = "OpenNI2 (for Kinect)"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.OpenNI2_ASUS, enumName = "OpenNI2_ASUS", comment = "OpenNI2 (for Asus Xtion and Occipital Structure sensors)"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.GPhoto2, enumName = "GPhoto2", comment = "gPhoto2 connection"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.GStreamer, enumName = "GStreamer", comment = "GStreamer"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.FFMPEG, enumName = "FFMPEG", comment = "Open and record video file or stream using the FFMPEG library"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Images, enumName = "Images", comment = "OpenCV Image Sequence (e.g. img_%02d.jpg)"
            });
            drivers.Add(new CapDriver {
                enumValue = (int)CaptureDevice.Aravis, enumName = "Aravis", comment = "Aravis SDK"
            });



            string driverName, driverComment;
            int    driverEnum;
            bool   found;

            Console.WriteLine("Searching for cameras IDs...");
            for (int drv = 0; drv < drivers.Count; drv++)
            {
                driverName    = drivers[drv].enumName;
                driverEnum    = drivers[drv].enumValue;
                driverComment = drivers[drv].comment;
                Console.WriteLine("Testing driver " + driverName);
                found = false;

                int maxID = 100; // probe up to 100 IDs per driver
                if (driverEnum == (int)CaptureDevice.VFW)
                {
                    maxID = 10; // VFW starts reopening the same camera after index 10
                }
                for (int idx = 0; idx < maxID; idx++)
                {
                    VideoCapture cap = new VideoCapture(driverEnum + idx);  // open the camera

                    // request an aggressive mode; drivers clamp these to what the device supports
                    cap.Fps         = 120;
                    cap.FrameWidth  = 4096;
                    cap.FrameHeight = 2160;
                    cap.FourCC      = "MJPG";

                    if (cap.IsOpened())                  // check if we succeeded
                    {
                        Mat frame = new Mat();
                        found = true;
                        camIdx.Add(driverEnum + idx);  // vector of all available cameras
                        cap.Read(frame);

                        if (frame.Empty())
                        {
                            Console.WriteLine(driverName + "+" + idx + "\t opens: OK \t grabs: FAIL");
                        }
                        else
                        {
                            Console.WriteLine(driverName + "+" + idx + "\t opens: OK \t grabs: OK " + frame.Width + "x" + frame.Height + "@" + cap.Fps);
                        }
                    }
                    cap.Release();
                }
                if (!found)
                {
                    Console.WriteLine("Nothing!");
                }
            }
            Console.WriteLine(camIdx.Count + " camera IDs have been found");

            return camIdx.Count > 0;  // success if at least one camera responded
        }
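A minimal usage sketch for the probe above, assuming an instance of the surrounding class (here called `enumerator`, a hypothetical name) and the same OpenCvSharp types in scope:

            var ids = new List<int>();
            if (enumerator.EnumerateCameras(ids))
            {
                // each returned ID already encodes driver offset + device index
                using (var cap = new VideoCapture(ids[0]))
                using (var frame = new Mat())
                {
                    if (cap.IsOpened() && cap.Read(frame) && !frame.Empty())
                        Console.WriteLine("First camera delivers " + frame.Width + "x" + frame.Height);
                }
            }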
        //private Image<Hsv, byte> HsvIm;
        //private DenseHistogram histogram = new DenseHistogram(255, new RangeF(0, 255)); //Histogram
        //private float[] GrayHist = new float[255]; //Histogram
        #endregion
        #region Constructor
        public DetectorFace(Form1 form)
        {
            this.form = form;
            imageBox1 = new Emgu.CV.UI.ImageBox
            {
                Location              = new Point(12, 12),
                Size                  = new Size(form.Width / 2 - 12, form.Height - 80), // compute a size that keeps detection optimal for the cascade's scaling
                Name                  = "imageBox1",
                TabIndex              = 2,
                TabStop               = false,
                FunctionalMode        = Emgu.CV.UI.ImageBox.FunctionalModeOption.Minimum,
                BorderStyle           = System.Windows.Forms.BorderStyle.None,
                BackgroundImageLayout = System.Windows.Forms.ImageLayout.Stretch
            };
            imageBox2 = new Emgu.CV.UI.ImageBox
            {
                Location              = new Point(form.Width / 2 - 12, 12),
                Size = new Size(form.Width / 2 - 12, form.Height - 80), // compute a size that keeps detection optimal for the cascade's scaling
                Name                  = "imageBox2",
                TabIndex              = 2,
                TabStop               = false,
                FunctionalMode        = Emgu.CV.UI.ImageBox.FunctionalModeOption.Minimum,
                BorderStyle           = System.Windows.Forms.BorderStyle.None,
                BackgroundImageLayout = System.Windows.Forms.ImageLayout.Tile
            };

            /*//trackBar
             * trackBar = new TrackBar()
             * {
             *  Location = new Point(),
             *  Size = new Size(),
             *  Name = "trackBar",
             *  TabIndex = 2,
             *  TabStop = false,
             *  TickFrequency = 1,
             *  Maximum = 255,
             *  Minimum = 0,
             *  Value = 50
             * };*/
            /*///HistogramBox
             * histogramBox = new Emgu.CV.UI.HistogramBox
             * {
             *  Location = new Point(form.Width / 2 - 12, 12),
             *  Size = new Size(form.Width / 2 - 12, form.Height - 80), // compute a size that keeps detection optimal for the cascade's scaling
             *  Name = "histogramBox",
             *  TabIndex = 2,
             *  TabStop = false,
             *  FunctionalMode =  Emgu.CV.UI.ImageBox.FunctionalModeOption.Minimum,
             *  BorderStyle = System.Windows.Forms.BorderStyle.None,
             *  BackgroundImageLayout = System.Windows.Forms.ImageLayout.Tile,
             * };*/
            this.form.Controls.Add(imageBox1);
            this.form.Controls.Add(imageBox2);
            // this.form.Controls.Add(histogramBox);

            capture = new VideoCapture(Config.Config.DefaultCameraIndex, VideoCapture.API.Any);
            capture.SetCaptureProperty(CapProp.FrameWidth, imageBox1.Width);
            capture.SetCaptureProperty(CapProp.FrameHeight, imageBox1.Height);

            pen = new Pen(Color.Red, 3);

            // histogramBox.GenerateHistogram("hist1", Color.Red, HsvIm.Mat, 256, HsvIm.Mat. );
            // histogramBox.GenerateHistograms(HsvIm, 250);
            // HsvIm = capture.QueryFrame().ToImage<Hsv, byte>();
            // TrackBar.Scroll += trackBar_Scroll; ///trackBar
            im = capture.QueryFrame().ToImage <Hsv, byte>();
            capture.ImageGrabbed += Capture_ImageGrabbed;
            pen = new Pen(Color.Green, 4); // replaces the red pen created above before it is ever used
        }
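The Capture_ImageGrabbed handler subscribed above lives elsewhere in the project; a minimal Emgu CV-style sketch of such a handler might look like the following (note the event only starts firing once capture.Start() is called, which the constructor above does not do):

        private void Capture_ImageGrabbed(object sender, EventArgs e)
        {
            var frame = new Mat();
            capture.Retrieve(frame);   // fetch the frame that raised the event
            imageBox1.Image = frame;   // Emgu's ImageBox accepts a Mat directly
        }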
Esempio n. 32
 private void videoCaptureDevice1_ImageCaptured(object source, VideoCapture.ImageCapturedEventArgs e)
 {
     // set the picturebox picture
     CubeAnalyser.SourceImage = e.WebCamImage;
     saveImageToolStripMenuItem.Enabled = true;
     this.picbVideoCapture.Image = (Image)e.WebCamImage.Clone();
     this.picbFaceImg.Image = (Image)e.WebCamImage.Clone();
     FilterAllImages();
 }
Esempio n. 33
 public void GetClassHandles(VideoCapture.VideoCapture inVideo, Output.ControllerMax inMax, Output.TitanOne.Write inTo, VideoResolution inVid)
 {
     Class.VideoCapture = inVideo;
     Class.ControllerMax = inMax;
     Class.TitanOne = inTo;
     Class.VideoResolution = inVid;
 }
Esempio n. 34
        private static void Main()
        {
            try
            {
                var cap = new VideoCapture(0);
                //var cap = new VideoCapture("20090124_WeeklyAddress.ogv.360p.webm");
                if (!cap.IsOpened())
                {
                    Console.WriteLine("Unable to connect to camera");
                    return;
                }

                using (var win = new ImageWindow())
                {
                    // Load face detection and pose estimation models.
                    using (var detector = Dlib.GetFrontalFaceDetector())
                        using (var poseModel = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
                        {
                            // Grab and process frames until the main window is closed by the user.
                            while (!win.IsClosed())
                            {
                                // Grab a frame
                                var temp = new Mat();
                                if (!cap.Read(temp))
                                {
                                    break;
                                }

                                // Turn OpenCV's Mat into something dlib can deal with. The original C++
                                // sample wraps the Mat without copying, so the dlib image is only valid
                                // while temp is alive; this C# port instead copies the pixels into a
                                // managed array with Marshal.Copy before handing them to LoadImageData,
                                // so cimg does not alias temp's memory.
                                var array = new byte[temp.Width * temp.Height * temp.ElemSize()];
                                Marshal.Copy(temp.Data, array, 0, array.Length);
                                using (var cimg = Dlib.LoadImageData <RgbPixel>(array, (uint)temp.Height, (uint)temp.Width, (uint)(temp.Width * temp.ElemSize())))
                                {
                                    // Detect faces
                                    var faces = detector.Detect(cimg);
                                    // Find the pose of each face.
                                    var shapes = new List <FullObjectDetection>();
                                    for (var i = 0; i < faces.Length; ++i)
                                    {
                                        var det = poseModel.Detect(cimg, faces[i]);
                                        shapes.Add(det);
                                    }

                                    // Display it all on the screen
                                    win.ClearOverlay();
                                    win.SetImage(cimg);
                                    var lines = Dlib.RenderFaceDetections(shapes);
                                    win.AddOverlay(lines);

                                    foreach (var line in lines)
                                    {
                                        line.Dispose();
                                    }
                                }

                                temp.Dispose(); // release the frame Mat before grabbing the next one
                            }
                        }
                }
            }
            //catch (serialization_error&e)
            //{
            //    cout << "You need dlib's default face landmarking model file to run this example." << endl;
            //    cout << "You can get it from the following URL: " << endl;
            //    cout << "   http://dlib.net/files/shape_predictor_68_face_landmarks.dat.bz2" << endl;
            //    cout << endl << e.what() << endl;
            //}
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
Esempio n. 35
        public static List<Mat> PreviewCamera(bool savePictureMode)
        {
            // Opens a camera device
            var capture = new VideoCapture(0);
            if (!capture.IsOpened())
                return null;
            var returnedMat = new List<Mat>();

            var frame = new Mat();

            while (true)
            {
                // Read image
                capture.Read(frame);
                if (frame.Empty())
                    return null;

                Cv2.ImShow("Camera", frame);
                var key = Cv2.WaitKey(30);

                if (key == 27) // Esc exits and returns the collected snapshots
                {
                    Cv2.DestroyAllWindows();
                    return returnedMat;
                }
                if (key == 115) // 's' saves a snapshot of the current frame
                {
                    returnedMat.Add(frame.Clone());
                }
            }
        }
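A hedged usage sketch for the preview loop above, called from within the same class (note that the savePictureMode flag is not actually consulted in the body above; the file names below are illustrative):

        var snapshots = PreviewCamera(savePictureMode: true);
        if (snapshots != null)
        {
            for (int i = 0; i < snapshots.Count; i++)
            {
                Cv2.ImWrite($"snapshot_{i}.png", snapshots[i]);
                snapshots[i].Dispose();
            }
        }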
Esempio n. 36
        void Recognize(object param)
        {
            stop = false;
            var         mode       = (RecognizationModes)param;
            List <Rect> boundRect  = new List <Rect>();
            int         sleepTime  = 0;
            int         frameCount = 0;

            if (mode == RecognizationModes.image)
            {
                image = new Mat(path);
            }
            else
            {
                image   = new Mat();
                capture = new VideoCapture(0);
                double fps = capture.Fps;
                if (fps == 0)
                {
                    fps = 60;
                }
                sleepTime = (int)Math.Round(1000 / fps);
            }
            using (Mat grayImage = new Mat())
                using (Mat sobelImage = new Mat())
                    using (Mat thresholdImage = new Mat())
                    {
                        while (true)
                        {
                            if (frameCount % 6 == 0)
                            {
                                boundRect.Clear();
                            }
                            if (mode == RecognizationModes.camera)
                            {
                                capture.Read(image);
                            }
                            // convert to grayscale
                            Cv2.CvtColor(image, grayImage, ColorConversionCodes.BGR2GRAY);

                            // Sobel filter to detect vertical edges
                            Cv2.Sobel(grayImage, sobelImage, MatType.CV_8U, 1, 0);
                            Cv2.Threshold(sobelImage, thresholdImage, 0, 255, ThresholdTypes.Otsu | ThresholdTypes.Binary);

                            using (Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new OpenCvSharp.Size(10, 15)))
                            {
                                Cv2.MorphologyEx(thresholdImage, thresholdImage, MorphTypes.Close, element);
                                OpenCvSharp.Point[][] edgesArray = thresholdImage.Clone().FindContoursAsArray(RetrievalModes.External, ContourApproximationModes.ApproxNone);
                                foreach (OpenCvSharp.Point[] edges in edgesArray)
                                {
                                    OpenCvSharp.Point[] normalizedEdges = Cv2.ApproxPolyDP(edges, 17, true);
                                    Rect appRect = Cv2.BoundingRect(normalizedEdges);
                                    if (appRect.Height > 10 && appRect.Width > 20 && appRect.Height / (double)appRect.Width < 0.45)
                                    {
                                        boundRect.Add(appRect);
                                    }
                                }
                            }

                            foreach (Rect r in boundRect)
                            {
                                // cut is a view into image, so blurring it in place
                                // blurs the detected plate region directly in the frame
                                using (Mat cut = new Mat(image, r))
                                {
                                    Cv2.GaussianBlur(cut, cut, new OpenCvSharp.Size(25, 25), 10);
                                }
                            }

                            frameCount++;
                            pictureBox1.Image = new Bitmap(OpenCvSharp.Extensions.BitmapConverter.ToBitmap(image));
                            //Cv2.ImWrite(@"D:\Visual_studio\RegistrationPlateRecognizer\Pictures\hidePlate.jpg", image);
                            Cv2.WaitKey(sleepTime);
                            if (mode == RecognizationModes.image)
                            {
                                return;
                            }

                            if (stop)
                            {
                                pictureBox1.Image = null;
                                return;
                            }
                        }
                    }
        }
Esempio n. 37
 /// <summary>
 /// Close the window
 /// </summary>
 /// <param name="e"></param>
 protected override void OnClosing(CancelEventArgs e)
 {
     VideoCapture.Close();
     base.OnClosing(e);
 }
Esempio n. 38
 /// <summary>
 /// Window loaded
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 private void Window_Loaded(object sender, RoutedEventArgs e)
 {
     VideoCapture.Play();
 }
Esempio n. 39
        static void Main(string[] args)
        {
            //var capture = new VideoCapture("rtmp://rtmp.open.ys7.com/openlive/61e96da9f12a4d058f4737d02c42998d");
            var capture = new VideoCapture("D:\\视频1\\192.168.1.65_01_20190314114136657_1.mp4");

            modelFile = "logs_2\\pb\\frozen_model.pb";
            //dir = "tmp";
            //List<string> files = Directory.GetFiles("img").ToList();
            //ModelFiles(dir);
            var graph = new TFGraph();
            // Load the serialized GraphDef from file
            var model = File.ReadAllBytes(modelFile);

            // Import the GraphDef
            graph.Import(model, "");
            using (var windowSrc = new Window("src"))
                using (var frame = new Mat())
                    using (var image缩小 = new Mat())
                        using (var session = new TFSession(graph))
                        {
                            string file = "1.jpg";
                            //var labels = File.ReadAllLines(labelsFile);
                            Console.WriteLine("TensorFlow图像识别 LineZero");

                            //var frame = new Mat();
                            //var inrange = new Mat();
                            //var fg = new Mat();

                            while (true)
                            {
                                capture.Read(frame);
                                if (frame.Empty())
                                {
                                    break;
                                }
                                Cv2.Resize(frame, image缩小, new Size(280, 280), 0, 0, InterpolationFlags.Linear); // shrink the frame to 280x280 for the model input

                                Cv2.ImWrite(file, image缩小);

                                var tensor = CreateTensorFromImageFile(file);



                                // Run inference on the image files
                                // For multiple images, session.Run() can be called in a loop (and
                                // concurrently). Alternatively, images can be batched since the model
                                // accepts batches of image data as input.

                                var runner = session.GetRunner();
                                runner.AddInput(graph["x_input"][0], tensor).Fetch(graph["softmax_linear/softmax_linear"][0]);
                                var output = runner.Run();
                                // output[0].Value() is a vector containing probabilities of
                                // labels for each image in the "batch". The batch size was 1.
                                // Find the most probable label index.

                                var result = output[0];
                                var rshape = result.Shape;
                                if (result.NumDims != 2 || rshape[0] != 1)
                                {
                                    var shape = "";
                                    foreach (var d in rshape)
                                    {
                                        shape += $"{d} ";
                                    }
                                    shape = shape.Trim();
                                    Console.WriteLine($"Error: expected to produce a [1 N] shaped tensor where N is the number of labels, instead it produced one with shape [{shape}]");
                                    Environment.Exit(1);
                                }

                                // You can get the data in two ways, as a multi-dimensional array, or arrays of arrays,
                                // code can be nicer to read with one or the other, pick it based on how you want to process
                                // it
                                bool jagged = true;

                                var   bestIdx = 0;
                                float p = 0, best = 0;
                                if (jagged)
                                {
                                    var      probabilities = ((float[][])result.GetValue(jagged: true))[0];
                                    double[] d             = floatTodouble(probabilities);
                                    double[] retResult     = Softmax(d);

                                    // Note: the argmax below reads the raw probabilities;
                                    // retResult only supplies the loop bound here.
                                    for (int i = 0; i < retResult.Length; i++)
                                    {
                                        if (probabilities[i] > best)
                                        {
                                            bestIdx = i;
                                            best    = probabilities[i];
                                        }
                                    }
                                }
                                else
                                {
                                    var val = (float[, ])result.GetValue(jagged: false);

                                    // Result is [1,N], flatten array
                                    for (int i = 0; i < val.GetLength(1); i++)
                                    {
                                        if (val[0, i] > best)
                                        {
                                            bestIdx = i;
                                            best    = val[0, i];
                                        }
                                    }
                                }

                                //Console.WriteLine($"{Path.GetFileName(file)} 最佳匹配: [{bestIdx}] {best * 100.0}% 标识为:{labels[bestIdx]}");
                                string 标识1 = "";
                                switch (bestIdx)
                                {
                                case 0:
                                    标识1 = "kong0";
                                    break;

                                case 1:
                                    标识1 = "yao1";
                                    break;

                                case 2:
                                    标识1 = "yao2";
                                    break;

                                case 3:
                                    标识1 = "yao3";
                                    break;

                                case 4:
                                    标识1 = "yao4";
                                    break;

                                case 5:
                                    标识1 = "xian1";
                                    break;

                                case 6:
                                    标识1 = "xian2";
                                    break;

                                case 7:
                                    标识1 = "have7";
                                    break;
                                }
                                string 标识2 = "--: " + (best).ToString() + "%";

                                Point textPos = new Point(1, 100);

                                image缩小.PutText(标识1 + 标识2, textPos, HersheyFonts.HersheySimplex, 0.5, Scalar.White);


                                windowSrc.ShowImage(image缩小);
                                Cv2.WaitKey(10);
                            }
                        }
            Console.ReadKey();
        }
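The helpers floatTodouble and Softmax referenced above are not part of this excerpt; a minimal sketch consistent with how Main uses them (widen the float array, then normalize with a numerically stable softmax, using System.Linq) might be:

        static double[] floatTodouble(float[] values)
        {
            var result = new double[values.Length];
            for (int i = 0; i < values.Length; i++)
                result[i] = values[i];
            return result;
        }

        static double[] Softmax(double[] logits)
        {
            // subtract the max before exponentiating to avoid overflow
            double max = logits.Max();
            double[] exps = logits.Select(v => Math.Exp(v - max)).ToArray();
            double sum = exps.Sum();
            return exps.Select(v => v / sum).ToArray();
        }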
Esempio n. 40
        private void Form1_Load(object sender, EventArgs e)
        {
            Text += " (SDK v" + VideoCapture1.SDK_Version + ", " + VideoCapture1.SDK_State + ")";

            tmRecording.Elapsed += (senderx, args) =>
            {
                UpdateRecordingTime();
            };

            cbOutputFormat.SelectedIndex = 2;

            foreach (var device in VideoCapture1.Video_CaptureDevicesInfo)
            {
                cbVideoInputDevice.Items.Add(device.Name);
            }

            if (cbVideoInputDevice.Items.Count > 0)
            {
                cbVideoInputDevice.SelectedIndex = 0;
            }

            cbVideoInputDevice_SelectedIndexChanged(null, null);

            foreach (var device in VideoCapture1.Audio_CaptureDevicesInfo)
            {
                cbAudioInputDevice.Items.Add(device.Name);
            }

            if (cbAudioInputDevice.Items.Count > 0)
            {
                cbAudioInputDevice.SelectedIndex = 0;
                cbAudioInputDevice_SelectedIndexChanged(null, null);
            }

            cbAudioInputLine.Items.Clear();

            if (!string.IsNullOrEmpty(cbAudioInputDevice.Text))
            {
                var deviceItem =
                    VideoCapture1.Audio_CaptureDevicesInfo.FirstOrDefault(device => device.Name == cbAudioInputDevice.Text); // FirstOrDefault, so the null check below is meaningful
                if (deviceItem != null)
                {
                    foreach (string line in deviceItem.Lines)
                    {
                        cbAudioInputLine.Items.Add(line);
                    }

                    if (cbAudioInputLine.Items.Count > 0)
                    {
                        cbAudioInputLine.SelectedIndex = 0;
                        cbAudioInputLine_SelectedIndexChanged(null, null);
                        cbAudioInputFormat_SelectedIndexChanged(null, null);
                    }
                }
            }

            string defaultAudioRenderer = string.Empty;

            foreach (string audioOutputDevice in VideoCapture1.Audio_OutputDevices)
            {
                cbAudioOutputDevice.Items.Add(audioOutputDevice);

                if (audioOutputDevice.Contains("Default DirectSound Device"))
                {
                    defaultAudioRenderer = audioOutputDevice;
                }
            }

            if (cbAudioOutputDevice.Items.Count > 0)
            {
                if (string.IsNullOrEmpty(defaultAudioRenderer))
                {
                    cbAudioOutputDevice.SelectedIndex = 0;
                }
                else
                {
                    cbAudioOutputDevice.Text = defaultAudioRenderer;
                }
            }

            edOutput.Text      = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\" + "output.mp4";
            edNewFilename.Text = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\" + "output_new.mp4";

            if (VideoCapture.Filter_Supported_EVR())
            {
                VideoCapture1.Video_Renderer.Video_Renderer = VFVideoRenderer.EVR;
            }
            else if (VideoCapture.Filter_Supported_VMR9())
            {
                VideoCapture1.Video_Renderer.Video_Renderer = VFVideoRenderer.VMR9;
            }
            else
            {
                VideoCapture1.Video_Renderer.Video_Renderer = VFVideoRenderer.VideoRenderer;
            }
        }
Esempio n. 41
        public frmFaceRecognition()
        {
            InitializeComponent();
            WebCam = new VideoCapture(2);
            #region camera
            pbWebCam.SizeMode = PictureBoxSizeMode.StretchImage;
            CaptureDevice     = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            foreach (FilterInfo Device in CaptureDevice)
            {
                comboBox1.Items.Add(Device.Name);
            }
            #endregion camera
            #region initialization
            FaceDetection   = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Authentification/data/haarcascade_frontalface_default.xml"));
            EyeDetection    = new CascadeClassifier(System.IO.Path.GetFullPath(@"../../Authentification/data/haarcascade_eye.xml"));
            FaceRecognition = new EigenFaceRecognizer();
            Frame           = new Mat();
            Faces           = new List <Image <Gray, byte> >();
            IDs             = new List <int>();
            #endregion initialization


            #region eigen_list
            try
            {
                //Load previously added images (negative results)
                string Labelsinfo = File.ReadAllText(Application.StartupPath + "/TrainedFaces/TrainedLabels.txt");
                var    labels     = Labelsinfo.Split('%');
                foreach (string label in labels)
                {
                    IDs.Add(Convert.ToInt32(label));
                }
                IDs.RemoveAt(0);
                NumberOfTestImages = Convert.ToInt16(labels[0]);

                string faceString = "";
                for (int i = 1; i < NumberOfTestImages + 1; i++)
                {
                    faceString = "face" + i + ".bmp";
                    Faces.Add(new Image <Gray, byte>(Application.StartupPath + "/TrainedFaces/" + faceString));
                }
            }
            catch (Exception e)
            {
                MessageBox.Show("Error with loading data static data", "Loading error", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
            }


            //FaceRecognition = new EigenFaceRecognizer(
            //    Faces.ToArray(),
            //    IDs.ToArray(),
            //    3000,
            //    double.PositiveInfinity
            //    );


            var arrayLength = Faces.Count;

            //Image<Gray, byte>[] facesArray = Faces.ToArray();
            //int[] IDsArray = IDs.ToArray();


            Mat[] mats   = new Mat[Faces.Count];
            int[] newIds = new int[Faces.Count];
            for (int i = 0; i < Faces.Count; i++)
            {
                mats[i]   = Faces[i].Mat;
                newIds[i] = IDs[i];
            }

            FaceRecognition.Train(mats, newIds);
            var result = FaceRecognition.Predict(mats[0]);



            #endregion eigen_list



            StartCamera();
        }
Esempio n. 42
        private List <InputFileModel> GetInputFromStereoCamera(VideoCapture LeftCamera, VideoCapture RightCamera, int countInputFile = 0)
        {
            LeftCamera.Grab();
            RightCamera.Grab();
            Mat LeftImage  = new Mat();
            Mat RightImage = new Mat();

            LeftCamera.Retrieve(LeftImage);
            RightCamera.Retrieve(RightImage);
            LeftImage.Save(Path.Combine($@"{tempLeftStackDirectory}", $"Left_{countInputFile}.JPG"));
            RightImage.Save(Path.Combine($@"{tempRightStackDirectory}", $"Right_{countInputFile}.JPG"));


            var inputFileLeft = new InputFileModel(Path.Combine($@"{tempLeftStackDirectory}", $"Left_{countInputFile}.JPG"));
            var imageList     = _winForm.ImageList[(int)EListViewGroup.LeftCameraStack];
            var listViewer    = _winForm.ListViews[(int)EListViewGroup.LeftCameraStack];

            _fileManager.AddInputFileToList(inputFileLeft, _fileManager.listViewerModel.ListOfListInputFolder[(int)EListViewGroup.LeftCameraStack], imageList, listViewer);

            var inputFileRight = new InputFileModel(Path.Combine($@"{tempRightStackDirectory}", $"Right_{countInputFile}.JPG"));

            imageList  = _winForm.ImageList[(int)EListViewGroup.RightCameraStack];
            listViewer = _winForm.ListViews[(int)EListViewGroup.RightCameraStack];
            _fileManager.AddInputFileToList(inputFileRight, _fileManager.listViewerModel.ListOfListInputFolder[(int)EListViewGroup.RightCameraStack], imageList, listViewer); // was inputFileLeft: the right-camera frame belongs in the right stack

            var returnList = new List <InputFileModel>();

            returnList.Add(inputFileLeft);
            returnList.Add(inputFileRight);

            return(returnList);
        }
Esempio n. 43
        private void InterpretHelper(int index)
        {
            System.Drawing.Bitmap bitmap = GetBitmap(index);
            VideoCapture capture = new VideoCapture(index, bitmap);
            CopyPixelsFromDrawingBitmap(bitmap, _currentBuffer);
            _currentFrame = Bitmap.FromPixels(bitmap.Width, bitmap.Height, _currentBuffer);

            IBoundingBox invalidated = GetDirtyRect(_previousFrame, _currentFrame);

            var tags = new Dictionary<string, object>();
            tags.Add("videocapture", capture);
            tags.Add("invalidated", invalidated);
            tags.Add("previous", _previousTree);

            Tree root = Tree.FromPixels(_currentFrame, tags);


            Tree interpretation = _interpretationLogic.Interpret(root); 
            
            if (FrameInterpreted != null)
                FrameInterpreted(this, new InterpretedFrame(interpretation));

            //Swap the buffers we're writing to
            int[] tmp = _currentBuffer;
            _currentBuffer = _previousBuffer;
            _previousBuffer = tmp;


            //Set the previous bitmap and tree
            _previousTree = interpretation;
            _previousFrame = Bitmap.FromPixels(_currentFrame.Width, _currentFrame.Height, _previousBuffer);
        }
Esempio n. 44
        /// <summary>
        /// Initializes the DeviceClient and sets up the callback to receive
        /// messages containing temperature information
        /// </summary>
        static async Task Init(string connectionString)
        {
            try
            {
                LogUtil.Log($"Init: Connection String: {connectionString}", LogLevel.Info);

                var mqttSettings = new MqttTransportSettings(TransportType.Mqtt_Tcp_Only);
                // During dev you might want to bypass the cert verification. It is highly recommended to verify certs systematically in production
                mqttSettings.RemoteCertificateValidationCallback = (sender, certificate, chain, sslPolicyErrors) => true;

                ITransportSettings[] settings = { mqttSettings };
                // Open a connection to the Edge runtime
                _deviceClient = DeviceClient.CreateFromConnectionString(connectionString, settings);

                var moduleTwin = await _deviceClient.GetTwinAsync();

                moduleId = moduleTwin.ModuleId; // not working yet
                LogUtil.Log($"moduleTwin.moduleId: {moduleId}");

                var moduleTwinCollection = moduleTwin.Properties.Desired;
                if (moduleTwinCollection["ModuleId"] != null)
                {
                    moduleId = moduleTwinCollection["ModuleId"];
                    LogUtil.Log($"ModuleId: {moduleId}", LogLevel.Info);
                }

                if (moduleTwinCollection["RTSP"] != null)
                {
                    string tRTSP = moduleTwinCollection["RTSP"];
                    LogUtil.Log($"RTSP: {tRTSP}", LogLevel.Info);

                    lock (captureLocker)
                    {
                        if (_cameraCapture != null)
                        {
                            _cameraCapture.Dispose();
                        }
                        _cameraCapture = new VideoCapture(tRTSP);

                        RTSP = tRTSP;
                    }
                    LogUtil.Log("RTSP Set", LogLevel.Info);
                }
                if (moduleTwinCollection["StorageConnectString"] != null)
                {
                    STORAGECONNECTSTRING = moduleTwinCollection["StorageConnectString"];
                    LogUtil.Log($"StorageConnectString: {STORAGECONNECTSTRING}", LogLevel.Info);
                }
                if (moduleTwinCollection["StorageURI"] != null)
                {
                    STORAGEURI = moduleTwinCollection["StorageURI"];
                    LogUtil.Log($"StorageURI: {STORAGEURI}", LogLevel.Info);
                }

                await _deviceClient.OpenAsync();

                LogUtil.Log("IoT Hub module client initialized.", LogLevel.Info);

                // Register callback to be called when a message is received by the module
                await _deviceClient.SetInputMessageHandlerAsync("input1", PipeMessage, _deviceClient);

                // Attach callback for Twin desired properties updates
                await _deviceClient.SetDesiredPropertyUpdateCallbackAsync(OnDesiredPropertiesUpdate, null);
            }
            catch (Exception e)
            {
                LogUtil.LogException(e);
            }
        }
Esempio n. 45
 public void OpenVideo(string fileName)
 {
     capture = new VideoCapture(fileName);
 }
Esempio n. 46
        private static async Task GetFrame()
        {
            Mat frame    = null;
            Mat newFrame = null;

            try
            {
                frame = _cameraCapture.RetrieveMat();

                if (frame != null && (!frame.Empty()))
                {
                    newFrame = frame.Resize(new Size(CAPTUREWIDTH, CAPTUREHEIGHT));

                    lock (frameLoker)
                    {
                        if (latestFrame == newFrame)
                        {
                            LogUtil.Log("latest frame the same !!!!!!");
                        }
                        if (latestFrame != null)
                        {
                            latestFrame.Dispose();
                        }
                        latestFrame = newFrame.Clone();

                        matBuffer.Add(newFrame.Clone());

                        if (matBuffer.Count >= bufferSize)
                        {
                            var firstmat = matBuffer[0];
                            matBuffer.RemoveAt(0);
                            firstmat.Dispose();
                        }
                    }
                }
                else
                {
                    if (frame != null)
                    {
                        frame.Dispose();
                    }
                    failure++;
                    LogUtil.Log($"failed {failure} times.", LogLevel.Warning);
                }

                if (failure >= MAXFAILTIMES)
                {
                    LogUtil.Log($"Begin to Reset VideoCapture after failed {MAXFAILTIMES} times.", LogLevel.Warning);
                    await Task.Delay(5000);

                    lock (captureLocker)
                    {
                        if (_cameraCapture != null)
                        {
                            _cameraCapture.Dispose();
                        }
                        _cameraCapture = new VideoCapture(RTSP);
                    }
                    failure = 0;
                    LogUtil.Log($"Reset VideoCapture after failed {MAXFAILTIMES} times.", LogLevel.Warning);
                }
            }
            catch (Exception e)
            {
                LogUtil.LogException(e);
            }
            finally
            {
                if (frame != null)
                {
                    frame.Dispose();
                }
                if (newFrame != null)
                {
                    newFrame.Dispose();
                }
            }
        }
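ProcessFrame itself is not shown in this excerpt; given the locking convention above, a sketch of a consumer would clone latestFrame under frameLoker so GetFrame can keep replacing it:

        static Task ProcessFrame()
        {
            Mat work = null;
            lock (frameLoker)
            {
                if (latestFrame != null)
                {
                    work = latestFrame.Clone(); // private copy; the shared frame stays untouched
                }
            }

            if (work != null)
            {
                using (work)
                {
                    // run detection here, e.g. _hog.DetectMultiScale(work), and publish the result
                }
            }
            return Task.CompletedTask;
        }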
Esempio n. 47
 public bool Hatching()
 {
     Log($"EggHatch.Hatched()");
     return(VideoCapture.Match(415, 895, ImageRes.Get("egg_hatched"), DefaultImageCap, VideoCapture.LineSampler(3)));
 }
Esempio n. 48
        static void Main(string[] args)
        {
            try
            {
                LogUtil.Log("start", LogLevel.Info);

                // The Edge runtime gives us the connection string we need -- it is injected as an environment variable
                string connectionString = Environment.GetEnvironmentVariable("EdgeHubConnectionString");

                Init(connectionString).Wait();


                LogUtil.Log("device client opend!", LogLevel.Info);

                // --for debug only--
                // RTSP = "rtsp://*****:*****@10.172.94.234:554/h264/ch1/main/av_stream";
                // RTSP = "rtsp://23.102.236.116:5554/test2.mpg";

                _cameraCapture = new VideoCapture(RTSP);

                _hog = new HOGDescriptor();
                _hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());


                Task.Run(async() =>
                {
                    while (true)
                    {
                        var tRtsp = "";
                        lock (captureLocker)
                        {
                            tRtsp = RTSP;
                        }

                        if (!String.IsNullOrEmpty(tRtsp) && _cameraCapture != null)
                        {
                            break;
                        }
                        else
                        {
                            await Task.Delay(2000);
                            LogUtil.Log("Check RTSP", LogLevel.Warning);
                        }
                    }

                    while (true)
                    {
                        await GetFrame();
                    }
                });

                Task.Run(async() =>
                {
                    while (true)
                    {
                        await ProcessFrame();
                        await Task.Delay(1000);
                    }
                }).Wait();

                LogUtil.Log("Main Finshed!");
            }
            catch (Exception e)
            {
                LogUtil.LogException(e);
            }
        }
Esempio n. 49
 public VideoProcessing(VideoCapture vc, VideoOverlaySettings settings)
 {
     videoCapture = vc;
     s = settings;
 }
Esempio n. 50
        public static int Main(string[] args)
        {
            SerialPort serial;

            try
            {
                IEnumerable <PortInfo> ports = PortInfo.GetPorts();
                string port = ports.FirstOrDefault((pi) => pi.Description == serialDevice)?.DeviceID;
                if (port != null)
                {
                    serial = new SerialPort(port, baudRate);
                }
                else
                {
                    throw new IOException("Could not connect to serial port.");
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
                return(1);
            }

            Console.WriteLine();
            TcpListener listener = new TcpListener(IPAddress.Any, 5050);

re:  // retry label; the matching goto is in a part of the sample not shown here
            try
            {
                listener.Start();
                Console.WriteLine("Listening " + listener.LocalEndpoint);
                using (var client = listener.AcceptTcpClient())
                {
                    listener.Stop();
                    Console.WriteLine("Connection established " + client.Client.RemoteEndPoint);
                    using (NetworkStream stream = client.GetStream())
                    {
                        int width, height, fps, quality;
                        using (var reader = new BinaryReader(stream, System.Text.Encoding.ASCII, true))
                        {
                            using (var writer = new BinaryWriter(stream, System.Text.Encoding.ASCII, true))
                            {
                                width   = reader.ReadInt32();
                                height  = reader.ReadInt32();
                                fps     = reader.ReadInt32();
                                quality = reader.ReadInt32();

                                Console.WriteLine("Parameters received:");
                                Console.WriteLine($"    Width:   {width}");
                                Console.WriteLine($"    Height:  {height}");
                                Console.WriteLine($"    Fps:     {fps}");
                                Console.WriteLine($"    Quality: {quality}");

                                var encoderParam = new ImageEncodingParam(ImwriteFlags.JpegQuality, quality);
                                using (var cap = new VideoCapture(0)
                                {
                                    FrameWidth = width, FrameHeight = height, Fps = fps
                                })
                                {
                                    Console.WriteLine("Started video capturing...");
                                    Mat imgMatrix = new Mat();
                                    Mat mask      = new Mat();
                                    Mat tresh     = new Mat();


                                    try
                                    {
                                        Scalar lastSeen  = default(Scalar);
                                        bool   autoPilot = false,
                                               camAuto   = false,
                                               moveAuto  = false,
                                               none;

                                        while (client.Connected && stream.CanWrite)
                                        {
                                            Command commandTaken = (Command)reader.ReadByte();

                                            autoPilot = commandTaken.HasFlag(Command.AutoPilot);
                                            camAuto   = commandTaken.HasFlag(Command.CamAuto);
                                            moveAuto  = commandTaken.HasFlag(Command.MoveAuto);
                                            none      = commandTaken == Command.None;


                                            if (cap.Read(imgMatrix))
                                            {
                                                Cv2.CvtColor(imgMatrix, mask, ColorConversionCodes.BGR2HSV);
                                                Cv2.InRange(mask, greenLower, greenUpper, tresh);
                                                Cv2.Erode(tresh, tresh, null, iterations: 2);
                                                Cv2.Dilate(tresh, tresh, null, iterations: 2);

                                                Cv2.FindContours(
                                                    tresh,
                                                    out Point[][] contours,
                                                    out HierarchyIndex[] hierarchyIndexes,
                                                    RetrievalModes.External,
                                                    ContourApproximationModes.ApproxSimple
                                                    );

                                                if (contours.Length > 0)
                                                {
                                                    contours = contours.OrderBy(element => Cv2.ContourArea(element)).ToArray(); // OrderBy returns a new sequence; the result must be assigned
                                                    Point[] max = contours[contours.Length - 1];

                                                    Cv2.MinEnclosingCircle(max, out Point2f xy, out float radius);

                                                    //Moments M = Cv2.Moments(max);
                                                    //Point center = new Point((M.M10 / M.M00), (M.M01 / M.M00));

                                                    Point center = new Point(xy.X, xy.Y);

                                                    if (radius > 10.0f)
                                                    {
                                                        Cv2.Circle(imgMatrix, center, (int)radius, new Scalar(0, 255, 255), thickness: 2);
                                                        Cv2.Circle(imgMatrix, center, 5, new Scalar(0, 0, 255), thickness: -1);

                                                        // which horizontal region of the screen the ball is in [0-3]
                                                        int xRegion = center.X.Map(0, width, 0, 3);
                                                        // which vertical region of the screen the ball is in [0-3]
                                                        int yRegion = center.Y.Map(0, height, 0, 3);
                                                        // whether the ball is too close to or too far from the camera [0-3]
                                                        int zRegion = ((int)radius).Map(10, 400, 0, 3);

                                                        lastSeen = FindBallCoordinates((int)radius, center, new Point(width / 2, height / 2));

                                                        #region Automatic Decisions

                                                        Command command = 0;

                                                        if (autoPilot)
                                                        {
                                                            if (xRegion < 1 && yRegion < 1)
                                                            {
                                                                command |= Command.CamLeft | Command.CamUp;
                                                            }
                                                            else if (xRegion < 1 && yRegion > 1)
                                                            {
                                                                command |= Command.CamLeft | Command.CamDown;
                                                            }
                                                            else if (xRegion > 1 && yRegion < 1)
                                                            {
                                                                command |= Command.CamRight | Command.CamUp;
                                                            }
                                                            else if (xRegion > 1 && yRegion > 1)
                                                            {
                                                                command |= Command.CamRight | Command.CamDown;
                                                            }
                                                            else if (xRegion < 1)
                                                            {
                                                                command |= Command.CamLeft;
                                                            }
                                                            else if (xRegion > 1)
                                                            {
                                                                command |= Command.CamRight;
                                                            }
                                                            else if (yRegion < 1)
                                                            {
                                                                command |= Command.CamUp;
                                                            }
                                                            else if (yRegion > 1)
                                                            {
                                                                command |= Command.CamDown;
                                                            }
                                                            if (zRegion > 1)
                                                            {
                                                                command |= Command.MoveBackward;
                                                            }
                                                            else if (zRegion < 1)
                                                            {
                                                                command |= Command.MoveForward;
                                                            }

                                                            byte[] message = { (byte)command };
                                                            serial.Write(message, 0, 1);
                                                        }

                                                        else if (camAuto)
                                                        {
                                                            if (xRegion < 1 && yRegion < 1)
                                                            {
                                                                command |= Command.CamLeft | Command.CamUp;
                                                            }
                                                            else if (xRegion < 1 && yRegion > 1)
                                                            {
                                                                command |= Command.CamLeft | Command.CamDown;
                                                            }
                                                            else if (xRegion > 1 && yRegion < 1)
                                                            {
                                                                command |= Command.CamRight | Command.CamUp;
                                                            }
                                                            else if (xRegion > 1 && yRegion > 1)
                                                            {
                                                                command |= Command.CamRight | Command.CamDown;
                                                            }
                                                            else if (xRegion < 1)
                                                            {
                                                                command |= Command.CamLeft;
                                                            }
                                                            else if (xRegion > 1)
                                                            {
                                                                command |= Command.CamRight;
                                                            }
                                                            else if (yRegion < 1)
                                                            {
                                                                command |= Command.CamUp;
                                                            }
                                                            else if (yRegion > 1)
                                                            {
                                                                command |= Command.CamDown;
                                                            }

                                                            byte[] message = { (byte)(command | (commandTaken & Command.MoveAuto)) };
                                                            serial.Write(message, 0, 1);
                                                        }
                                                        else if (moveAuto)
                                                        {
                                                            if (zRegion > 1 && xRegion < 1)
                                                            {
                                                                command |= Command.MoveForward | Command.MoveLeft;
                                                            }
                                                            else if (zRegion > 1 && xRegion > 1)
                                                            {
                                                                command |= Command.MoveForward | Command.MoveRight;
                                                            }
                                                            else if (zRegion < 1 && xRegion < 1)
                                                            {
                                                                command |= Command.MoveBackward | Command.MoveRight;
                                                            }
                                                            else if (zRegion < 1 && xRegion > 1)
                                                            {
                                                                command |= Command.MoveBackward | Command.MoveLeft;
                                                            }
                                                            else if (zRegion > 1)
                                                            {
                                                                // zRegion > 1 means the target is ahead; keep this consistent with the combined cases above
                                                                command |= Command.MoveForward;
                                                            }
                                                            else if (zRegion < 1)
                                                            {
                                                                command |= Command.MoveBackward;
                                                            }
                                                            else if (xRegion < 1)
                                                            {
                                                                command |= Command.MoveLeft;
                                                            }
                                                            else if (xRegion > 1)
                                                            {
                                                                command |= Command.MoveRight;
                                                            }

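                                                            // merge the new movement bits with the previously latched CamAuto flag into one byte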
                                                            byte[] message = { (byte)(command | (commandTaken & Command.CamAuto)) };
                                                            serial.Write(message, 0, 1);
                                                        }

                                                        else if (none)
                                                        {
                                                            byte[] message = { (byte)(commandTaken) };
                                                            serial.Write(message, 0, 1);
                                                        }


                                                        #endregion
                                                    }
                                                    else
                                                    {
                                                        Console.WriteLine($"{lastSeen.ToString()}");
                                                    }
                                                }

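                                                // JPEG-encode the frame, then write it length-prefixed to the network stream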
                                                Cv2.ImEncode(".jpg", imgMatrix, out byte[] result, encoderParam);
                                                writer.Write(result.Length);
                                                stream.Write(result, 0, result.Length);
                                            }
        private void BtnCaptureStart_Click(object sender, RoutedEventArgs e)
        {
            this.MyVideoCapture = new VideoCapture();

            bool result = this.MyVideoCapture.CreateCapture();
            if (!result)
            {
                MessageBox.Show("Failed to create the capture.");
                return;
            }

            result = this.MyVideoCapture.StartCapture();
            if (!result)
            {
                MessageBox.Show("Failed to start the capture.");
            }

            this.NotifyPropertyChanged("IsCaptureStartButtonEnabled");
            this.NotifyPropertyChanged("IsCaptureEndButtonEnabled");
        }
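A natural counterpart is a stop handler; the sketch below is only a guess at one, under the assumption that MyVideoCapture exposes a StopCapture() analogous to StartCapture():

        private void BtnCaptureEnd_Click(object sender, RoutedEventArgs e)
        {
            // hypothetical: StopCapture() is assumed to mirror StartCapture()
            if (!this.MyVideoCapture.StopCapture())
            {
                MessageBox.Show("Failed to stop the capture.");
            }

            this.NotifyPropertyChanged("IsCaptureStartButtonEnabled");
            this.NotifyPropertyChanged("IsCaptureEndButtonEnabled");
        }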
Esempio n. 52
0
        /// <param name="parameters">
        ///     [0] - Path to image,
        ///     [1] - Cascade 1,
        ///     [2] - Cascade 2 ...
        /// </param>
        private actionResult FindObjects(string[] parameters)
        {
            var ar = new actionResult();

            #region Parameters parsing

            switch (parameters.Length)
            {
            case 0: {
                ar.setError("Path to image not specified.");
                return(ar);
            }

            case 1: {
                ar.setError("Cascade name not specified.");
                return(ar);
            }
            }

            Image<Gray, byte> grayImage;
            if (string.IsNullOrEmpty(parameters[0]))
            {
                using (var capture = new VideoCapture()) {
                    Thread.Sleep(PluginOptions.CameraDelayMs);
                    grayImage = capture.QueryFrame().ToImage<Gray, byte>();
                }
            }
            else
            {
                try {
                    grayImage = new Image<Gray, byte>(parameters[0]);
                }
                catch {
                    ar.setError("Invalid path to image.");
                    return(ar);
                }
            }

            if (PluginOptions.UseImageCorrection)
            {
                grayImage._EqualizeHist();
            }

            #endregion

            var resultString = "";
            for (var i = 1; i < parameters.Length; i++)
            {
                if (string.IsNullOrEmpty(parameters[i]))
                {
                    continue;
                }

                using (var classifier = new CascadeClassifier($"{PluginOptions.CascadesPath}haarcascade_{parameters[i].ToLower().Trim()}.xml")) {
                    Rectangle[] objects = classifier.DetectMultiScale(grayImage, 1.1, 10);
                    if (objects.Length != 0)
                    {
                        for (var index = 0; index < objects.Length; index++)
                        {
                            grayImage.Draw(objects[index], new Gray(0), 2);
                        }
                    }
                    resultString += $"<{parameters[i]}:{objects.Length}>\n";
                }
            }
            SaveImage(grayImage.Bitmap);
            grayImage.Dispose();
            ar.setSuccess(resultString);
            return(ar);
        }
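For context, a minimal invocation sketch for FindObjects; the image path and cascade suffixes are assumptions and must correspond to files named haarcascade_<name>.xml under PluginOptions.CascadesPath (an empty path falls back to a camera grab):

            // hypothetical call: detect faces and eyes in an image on disk
            actionResult found = FindObjects(new[] { @"C:\images\people.jpg", "frontalface_default", "eye" });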
        // Use this for initialization
        void Start()
        {
            capture = new VideoCapture ();

            #if UNITY_WEBGL && !UNITY_EDITOR
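            // WebGL builds cannot block on file IO, so the StreamingAssets path is resolved asynchronously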
            StartCoroutine(Utils.getFilePathAsync("768x576_mjpeg.mjpeg", (result) => {
                capture.open (result);
                Init();
            }));
            #else
            capture.open (Utils.getFilePath ("768x576_mjpeg.mjpeg"));
            Init ();
            #endif
        }
Esempio n. 54
0
        private actionResult TeserractOcr(string[] parameters)
        {
            var ar = new actionResult();

            #region Parameters parsing

            switch (parameters.Length)
            {
            case 0: {
                ar.setError("Path to image not specified.");
                return(ar);
            }

            case 1: {
                ar.setError("Symbols culture not specified.");
                return(ar);
            }
            }

            Image<Gray, byte> grayImage;
            if (string.IsNullOrEmpty(parameters[0]))
            {
                using (var capture = new VideoCapture()) {
                    Thread.Sleep(PluginOptions.CameraDelayMs);
                    grayImage = capture.QueryFrame().ToImage<Gray, byte>();
                }
            }
            else
            {
                try {
                    grayImage = new Image<Gray, byte>(parameters[0]);
                }
                catch {
                    ar.setError("Invalid path to image.");
                    return(ar);
                }
            }

            if (PluginOptions.UseImageCorrection)
            {
                grayImage._EqualizeHist();
            }

            #endregion

            using (Tesseract tesseract = parameters.Length == 2
                       ? new Tesseract($"{PluginOptions.PluginPath}TessData\\", parameters[1], OcrEngineMode.TesseractOnly)
                       : parameters.Length == 3
                           ? new Tesseract($"{PluginOptions.PluginPath}TessData\\", parameters[1], OcrEngineMode.TesseractOnly, parameters[2])
                           : null) {
                if (tesseract == null)
                {
                    ar.setError("Failed to initialize recognizer due to invalid number of parameters.");
                    grayImage.Dispose();
                    return(ar);
                }

                string recognizedText;
                // binarize into a fresh buffer before OCR; high-contrast input generally helps Tesseract
                using (Image<Gray, byte> imgThold = grayImage.CopyBlank()) {
                    CvInvoke.Threshold(grayImage, imgThold, 140, 255, ThresholdType.Binary);
                    tesseract.SetImage(imgThold);
                    recognizedText = tesseract.GetUTF8Text();
                }
                if (string.IsNullOrWhiteSpace(recognizedText))
                {
                    ar.setError("No recognized symbols.");
                }
                else
                {
                    ar.setSuccess(recognizedText);
                }
                grayImage.Dispose();
            }
            return(ar);
        }
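A hedged invocation sketch for TeserractOcr; the image path is an assumption and the language code must match a traineddata file in the TessData folder (an empty path grabs a camera frame instead):

            // hypothetical call: OCR an on-disk image as English text
            actionResult text = TeserractOcr(new[] { @"C:\images\receipt.png", "eng" });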
Esempio n. 55
0
 public void GetVideoCapture(VideoCapture.VideoCapture videoCapture)
 {
     _videoCapture = videoCapture;
 }
Esempio n. 56
0
        private actionResult TrainFace(string[] parameters)
        {
            var ar = new actionResult();

            #region Parameters parsing

            switch (parameters.Length)
            {
            case 0: {
                ar.setError("Path to image not specified.");
                return(ar);
            }

            case 1: {
                ar.setError("Face name not specified.");
                return(ar);
            }
            }

            Image<Gray, byte> grayImage;
            if (string.IsNullOrEmpty(parameters[0]))
            {
                using (var capture = new VideoCapture()) {
                    Thread.Sleep(PluginOptions.CameraDelayMs);
                    grayImage = capture.QueryFrame().ToImage<Gray, byte>();
                }
            }
            else
            {
                try {
                    grayImage = new Image<Gray, byte>(parameters[0]);
                }
                catch {
                    ar.setError("Invalid path to image.");
                    return(ar);
                }
            }

            if (PluginOptions.UseImageCorrection)
            {
                grayImage._EqualizeHist();
            }

            #endregion

            Rectangle[] faces;
            using (var classifier = new CascadeClassifier($"{PluginOptions.CascadesPath}haarcascade_frontalface_default.xml")) {
                faces = classifier.DetectMultiScale(grayImage, 1.1, 10);
            }
            if (faces.Length == 0)
            {
                ar.setError("No face recognized.");
                return(ar);
            }
            using (Image<Gray, byte> faceImage = grayImage.Copy(faces[0]).Resize(100, 100, Inter.Cubic)) {
                MainCtl.TrainedImages.Add(faceImage);
                PluginOptions.PeopleFaces.Add(PluginOptions.PeopleFaces.Count + 1, parameters[1]);
                faceImage.Save($"{PluginOptions.PluginPath}Faces\\face{MainCtl.TrainedImages.Count}.bmp");
            }

            PluginOptions.SaveOptionsToXml();
            grayImage.Dispose();

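            // retrain the Eigen recognizer over the full face set and persist the model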
            using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                recognizer.Train(MainCtl.TrainedImages.ToArray(), PluginOptions.PeopleFaces.Keys.ToArray());
                recognizer.Write($"{PluginOptions.PluginPath}SavedCascade.xml");
            }
            ar.setInfo($"Added face with name: {parameters[0]}.");
            return(ar);
        }
Esempio n. 57
0
 public void GetClassHandles(VideoCapture.VideoCapture inVideo, Write inMax, Output.TitanOne.Write inTo, VideoResolution inVid)
 {
     Class.VideoCapture = inVideo;
     Class.CronusPlus = inMax;
     Class.TitanOne = inTo;
     Class.VideoResolution = inVid;
 }
Esempio n. 58
0
        private actionResult RecognizeFace(string[] parameters)
        {
            var ar = new actionResult();

            if (MainCtl.TrainedImages.Count == 0)
            {
                ar.setError("Database contains no trained faces.");
                return(ar);
            }

            #region Parameters parsing

            switch (parameters.Length)
            {
            case 0: {
                ar.setError("Path to image not specified.");
                return(ar);
            }

            case 1: {
                ar.setError("Face name not specified.");
                return(ar);
            }
            }

            Image<Gray, byte> grayImage;
            if (string.IsNullOrEmpty(parameters[0]))
            {
                using (var capture = new VideoCapture()) {
                    Thread.Sleep(PluginOptions.CameraDelayMs);
                    grayImage = capture.QueryFrame().ToImage<Gray, byte>();
                }
            }
            else
            {
                try {
                    grayImage = new Image<Gray, byte>(parameters[0]);
                }
                catch {
                    ar.setError("Invalid path to image.");
                    return(ar);
                }
            }

            if (PluginOptions.UseImageCorrection)
            {
                grayImage._EqualizeHist();
            }

            #endregion

            Rectangle[] faces;
            using (var classifier = new CascadeClassifier(PluginOptions.CascadesPath + "haarcascade_frontalface_default.xml")) {
                faces = classifier.DetectMultiScale(grayImage, 1.1, 10);
            }
            if (faces.Length == 0)
            {
                ar.setError("No trained faces found.");
                return(ar);
            }

            var resultString = "";
            foreach (Rectangle face in faces)
            {
                using (FaceRecognizer recognizer = new EigenFaceRecognizer()) {
                    recognizer.Read(PluginOptions.PluginPath + "SavedCascade.xml");
                    FaceRecognizer.PredictionResult recoResult = recognizer.Predict(grayImage.Copy(face).Resize(100, 100, Inter.Cubic));
                    resultString += $"<{PluginOptions.PeopleFaces.ElementAt(recoResult.Label)}:{recoResult.Distance}>";
                }
            }
            grayImage.Dispose();
            ar.setSuccess(resultString);
            return(ar);
        }
Esempio n. 59
0
 public static void InitCam(int cameraIndex = 0)
 {
     Camera._cameraCapture = new VideoCapture(cameraIndex);
 }
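A minimal follow-up sketch for grabbing a frame from the capture InitCam creates; it assumes the standard Emgu CV VideoCapture API, and the snapshot path is illustrative:

     // hypothetical: construct the same capture and save one frame to disk
     using (var capture = new VideoCapture(0))
     using (Mat frame = capture.QueryFrame())   // null if no frame is available yet
     {
         if (frame != null)
             CvInvoke.Imwrite("snapshot.png", frame);
     }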
Esempio n. 60
0
        private static void Main()
        {
            try
            {
                // capture from the camera; on Windows, VideoCaptureAPIs.DSHOW must be selected
                var cap = new VideoCapture(0, VideoCaptureAPIs.DSHOW);

                // alternatively, capture from a video file
                //var cap = new VideoCapture("video.webm");

                // check whether the capture device opened
                if (!cap.IsOpened())
                {
                    Console.WriteLine("Unable to connect to camera");
                    return;
                }

                Mat temp = null;

                // create the display window
                using (var win = new ImageWindow())
                {
                    // load the face detector and the 68-point landmark model
                    using (var detector = Dlib.GetFrontalFaceDetector())
                        using (var poseModel = ShapePredictor.Deserialize("shape_predictor_68_face_landmarks.dat"))
                        {
                            // run until the main window is closed
                            while (!win.IsClosed())
                            {
                                //System.Threading.Thread.Sleep(100);
                                // grab one frame from the capture
                                temp = cap.RetrieveMat();


                                if (temp == null || temp.Empty())
                                {
                                    break;
                                }


                                // convert the OpenCV Mat into Dlib image data (row stride = width * channels)
                                var array = new byte[temp.Width * temp.Height * temp.ElemSize()];
                                Marshal.Copy(temp.Data, array, 0, array.Length);
                                using (var cimg = Dlib.LoadImageData<BgrPixel>(array, (uint)temp.Height, (uint)temp.Width, (uint)(temp.Width * temp.ElemSize())))
                                {
                                    // detect faces
                                    var faces = detector.Operator(cimg);
                                    // fit the landmark model to each detected face
                                    var shapes = new List<FullObjectDetection>();
                                    for (var i = 0; i < faces.Length; ++i)
                                    {
                                        var det = poseModel.Detect(cimg, faces[i]);
                                        shapes.Add(det);
                                    }

                                    // draw the image and the landmark overlay
                                    win.ClearOverlay();
                                    win.SetImage(cimg);
                                    var lines = Dlib.RenderFaceDetections(shapes);
                                    win.AddOverlay(lines);

                                    foreach (var line in lines)
                                    {
                                        line.Dispose();
                                    }
                                }

                                temp.Dispose(); // release this frame before grabbing the next
                            }
                        }
                }
            }
            //catch (serialization_error e)
            //{
            //    Console.WriteLine("The landmark model is required: download http://dlib.net/files/shape_predictor_68_face_landmarks.dat.bz2");
            //    Console.WriteLine(e.Message);
            //}
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }