Code Example #1
File: Program.cs Project: leekihak/opencvsharp
        private static void VideoCaptureSample()
        {
            var cap = new VideoCapture(0);

            if (!cap.IsOpened())
            {
                Console.WriteLine("Can't use camera.");
                return;
            }

            var frame = new Mat();
            // Grab a frame, then pull it into the Mat through the internal operator>> binding
            cap.Grab();
            NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(cap.CvPtr, frame.CvPtr);
            
            Window.ShowImages(frame);

            using (var window = new Window("window"))
            {
                while (true)
                {
                    cap.Read(frame);
                    window.ShowImage(frame);
                    int key = Cv2.WaitKey(50);
                    if (key == 'b')
                        break;
                }
            }
        }
Code Example #2
        public void Run()
        {
            var capture = new VideoCapture();
            capture.Set(CaptureProperty.FrameWidth, 640);
            capture.Set(CaptureProperty.FrameHeight, 480);
            capture.Open(-1);
            if (!capture.IsOpened())
                throw new Exception("capture initialization failed");

            var fs = FrameSource.CreateCameraSource(-1);
            var sr = SuperResolution.CreateBTVL1();
            sr.SetInput(fs);

            using (var normalWindow = new Window("normal"))
            using (var srWindow = new Window("super resolution"))
            {
                var normalFrame = new Mat();
                var srFrame = new Mat();
                while (true)
                {
                    capture.Read(normalFrame);
                    sr.NextFrame(srFrame);
                    if (normalFrame.Empty() || srFrame.Empty())
                        break;
                    normalWindow.ShowImage(normalFrame);
                    srWindow.ShowImage(srFrame);
                    Cv2.WaitKey(100);
                }
            }
        }
Code Example #3
 public void stopCamera()
 {
     if (capture.IsOpened())
     {
         capture.Release();
         if (cameraThread != null && cameraThread.IsAlive)
         {
             cameraThread.Abort();
             cameraThread.Join();
         }
         writer.Release();
     }
     if (writer.IsOpened())
     {
         writer.Release();
     }
 }
Code Example #4
 private Camera()
 {
     capture = new VideoCapture(0);
     if (!capture.IsOpened())
     {
         throw new Exception();
     }
 }
Code Example #5
        public void player()
        {
            string       path     = AppDomain.CurrentDomain.BaseDirectory;
            string       fullpath = System.IO.Path.Combine(path, "cheerleading_0001.mp4");
            VideoCapture video    = VideoCapture.FromFile(fullpath);
            var          asfe     = video.Get(CaptureProperty.FrameCount);

            if (!video.IsOpened())
            {
                MessageBox.Show("not open");
                return;
            }

            video.Set(CaptureProperty.FrameWidth, video.FrameWidth);
            video.Set(CaptureProperty.FrameHeight, video.FrameHeight);
            double fps = video.Get(CaptureProperty.Fps);

            // Number of frames processed so far
            int count = 0;

            DateTime start = DateTime.Now;

            while (true)
            {
                Mat frame = new Mat();

                if (video.Read(frame))
                {
                    if (frame.Width == 0 && frame.Height == 0)
                    {
                        break;
                    }

                    count++;

                    TimeSpan playTime   = DateTime.Now - start;
                    TimeSpan targetTime = TimeSpan.FromSeconds(count / fps);

                    if (targetTime < playTime)
                    {
                        //Console.WriteLine($"{playTime}, {targetTime}");
                        continue;
                    }

                    Dispatcher.Invoke(new Action(delegate()
                    {
                        var a = WriteableBitmapConverter.ToWriteableBitmap(frame, 96, 96, PixelFormats.Bgr24, null);
                        //img_player.Source = a;
                    }));

                    playTime = DateTime.Now - start;
                    if (targetTime > playTime)
                    {
                        Thread.Sleep(targetTime - playTime);
                    }
                }
            }
        }
Code Example #6
        public static void DetectHand()
        {
            var cap = new VideoCapture();

            cap.Open(0);
            while (cap.IsOpened())
            {
                try
                {
                    var image = new Mat();
                    cap.Read(image);
                    var mask_img = Skinmask(image);
                    OpenCvSharp.Point[] contour;
                    var hull = GetCNTHull(mask_img, out contour);
                    image.DrawContours(new List <OpenCvSharp.Point[]>()
                    {
                        contour
                    }, -1, new Scalar(255, 255, 0), 2);
                    image.DrawContours(new List <OpenCvSharp.Point[]>()
                    {
                        hull
                    }, -1, new Scalar(0, 255, 255), 2);
                    var defects = GetDefects(contour);
                    if (defects.Length != 0)
                    {
                        var cnt = 0;
                        for (int i = 0; i < defects.Length; i++)
                        {
                            var start = contour[defects[i][0]];
                            var end   = contour[defects[i][1]];
                            var far   = contour[defects[i][2]];
                            var a     = Math.Sqrt(Math.Pow(end.X - start.X, 2) + Math.Pow(end.Y - start.Y, 2));
                            var b     = Math.Sqrt(Math.Pow(far.X - start.X, 2) + Math.Pow(far.Y - start.Y, 2));
                            var c     = Math.Sqrt(Math.Pow(end.X - far.X, 2) + Math.Pow(end.Y - far.Y, 2));
                            var angle = Math.Acos((b * b + c * c - a * a) / (2 * b * c));
                            if (angle <= Math.PI / 2.0)
                            {
                                cnt++;
                                Cv2.Circle(image, far, 4, new Scalar(0, 0, 255), -1);
                            }
                        }
                        if (cnt > 0)
                        {
                            cnt++;
                        }
                        image.PutText(cnt.ToString(), new OpenCvSharp.Point(0, 50), HersheyFonts.HersheySimplex, 1, new Scalar(255, 0, 0), 2, LineTypes.AntiAlias);
                    }
                    Cv2.ImShow("Detection", image);
                }
                catch (Exception) { }
                if (Cv2.WaitKey(1) == 113) // 113 == 'q'
                {
                    break;
                }
            }
            cap.Release();
            Cv2.DestroyAllWindows();
        }
Code Example #7
File: VideoSamplier.cs Project: l0nley/tello
        private void Worker(object p)
        {
            var tpl          = (Tuple <object, object>)p;
            var setter       = (Action <Mat>)tpl.Item1;
            var hb           = (Action)tpl.Item2;
            var lastHb       = DateTime.Now;
            var frameCounter = 0;

            try
            {
                var streamAddress = "udp://@0.0.0.0:11111";
                var s             = new VideoCapture();

                while (true)
                {
                    var elapsed = DateTime.Now.Subtract(lastHb).TotalSeconds;
                    if (elapsed >= 1)
                    {
                        hb();
                        SamplingRate = frameCounter;
                        lastHb       = DateTime.Now;
                        frameCounter = 0;
                    }

                    if (s.IsOpened() == false)
                    {
                        s.Open(streamAddress);
                        Connection = ConnectionState.Disconnected;
                    }
                    else
                    {
                        Connection = ConnectionState.Connected;
                    }
                    var mat = new Mat();
                    try
                    {
                        if (s.Read(mat))
                        {
                            frameCounter++;
                            LastTimeReceived = DateTime.Now;
                            setter(mat);
                        }
                    }
                    catch (ThreadAbortException)
                    {
                        throw;
                    }
                    catch
                    {
                    }
                }
            }
            catch (ThreadAbortException)
            {
                return;
            }
        }
Code Example #8
 /// <summary>
 /// Starts the camera in the constructor and prepares the frame Mat
 /// </summary>
 public Camera()
 {
     capture = new VideoCapture(0);
     if (!capture.IsOpened())
     {
         throw new Exception("Camera Initialize failed");
     }
     frame = new Mat();
 }
Code Example #9
File: Form1.cs Project: karayakar/WebcamWinForm
        private void recordingTimer_Tick(object sender, EventArgs e)
        {
            if (capture.IsOpened())
            {
                try
                {
                    frame = new Mat();
                    capture.Read(frame);
                    if (frame != null)
                    {
                        if (imageAlternate == null)
                        {
                            isUsingImageAlternate = true;
                            imageAlternate        = BitmapConverter.ToBitmap(frame);
                        }
                        else if (image == null)
                        {
                            isUsingImageAlternate = false;
                            image = BitmapConverter.ToBitmap(frame);
                        }

                        pictureBox1.Image = isUsingImageAlternate ? imageAlternate : image;

                        outputVideo.Write(frame);
                    }
                }
                catch (Exception)
                {
                    pictureBox1.Image = null;
                }
                finally
                {
                    if (frame != null)
                    {
                        frame.Dispose();
                    }

                    if (isUsingImageAlternate && image != null)
                    {
                        image.Dispose();
                        image = null;
                    }
                    else if (!isUsingImageAlternate && imageAlternate != null)
                    {
                        imageAlternate.Dispose();
                        imageAlternate = null;
                    }
                }

                if (isMicrophoneJustStarted)
                {
                    audioRecorder.StartRecording();
                    isMicrophoneJustStarted = false;
                }
            }
        }
Code Example #10
        public Form1()
        {
            InitializeComponent();

            for (int i = 0, max = 10; i < max; i++)
            {
                //Create a VideoCapture for grabbing camera images
                capture = new VideoCapture(i); // 0 is typically the built-in camera, 1 and up are external webcams
                if (capture.IsOpened())
                {
                    break;
                }
            }
            //Fail if no camera could be opened
            if (!capture.IsOpened())
            {
                MessageBox.Show("camera was not found!");
                Close();
                throw new Exception();
            }

            capture.FrameWidth  = WIDTH;
            capture.FrameHeight = HEIGHT;

            //Create the destination Mat
            frame = new Mat(HEIGHT, WIDTH, MatType.CV_8UC3);

            //Create the Bitmap used for display
            bmp = new Bitmap(frame.Cols, frame.Rows, (int)frame.Step(), System.Drawing.Imaging.PixelFormat.Format24bppRgb, frame.Data);

            //Resize the PictureBox to match the output size
            pictureBox1.Width  = frame.Cols;
            pictureBox1.Height = frame.Rows;

            //Create the Graphics object used for drawing
            graphic = pictureBox1.CreateGraphics();

            //Start the image-capture worker
            backgroundWorker1.RunWorkerAsync();

            // Create the barcode reader instance
            reader = new BarcodeReader();
        }
Code Example #11
 private Camera()
 {
     capture = new VideoCapture(0);
     if (!capture.IsOpened())
     {
         throw new Exception();
     }
     //var codec = @"MJPG";
     //capture.Set(CaptureProperty.FourCC, VideoWriter.FourCC(codec[0], codec[1], codec[2], codec[3]));
 }
Code Example #12
 public void ForceOpenVideoCapture()
 {
     _captureInstance = _captureInstance ?? new VideoCapture(0);
     while (!_captureInstance.IsOpened())
     {
         Console.WriteLine("Video Capture being reopened.");
         _captureInstance.Open(0);
         Thread.Sleep(500);
     }
 }
Code Example #13
        private void CaptureCameraCallback()
        {
            //CamerasForm frm1 = new CamerasForm();
            //frm1.Show();
            var window = new Window("capture");

            capture     = new VideoCapture();
            capture.Fps = 30.0f;
            int sleepTime = (int)(1000 / capture.Fps);

            capture.FrameHeight = 720;
            capture.FrameWidth  = 1280;
            capture.Open(0);
            capture.XI_Timeout = 1.0;
            if (capture.IsOpened())
            {
                while (true)
                {
                    frame = new Mat();
                    capture.Read(frame);
                    if (frame.Empty())
                    {
                        break;
                    }
                    else
                    {
                        window.ShowImage(frame);
                        if (faceSave == true)
                        {
                            frame.SaveImage("X:/myFace.jpg");
                            faceSave = false;
                            Console.WriteLine("Сохранено");
                        }
                        //Cv2.Equals(frame, frame);
                        //
                        var haarCascade = new CascadeClassifier("X:/haarcascade_frontalface_alt.xml");
                        var lbpCascade  = new CascadeClassifier("X:/haarcascade_eye_tree_eyeglasses.xml");

                        // Detect faces
                        Mat img        = frame;
                        Mat haarResult = DetectFace(haarCascade, img);
                        //Mat lbpResult = DetectFace(lbpCascade, img); // does not work for some reason

                        Cv2.ImShow("Faces by Haar", haarResult);
                        //Cv2.ImShow("Faces by LBP", lbpResult); // does not work for some reason
                        //Cv2.WaitKey(0);
                        //Cv2.DestroyAllWindows();
                        //image = BitmapConverter.ToBitmap(frame);
                        //frm1.pictureBox1.Image = image;
                        //image = null;
                        Cv2.WaitKey(sleepTime);
                    }
                }
            }
        }
Code Example #14
        private void Timer1_Tick(object sender, EventArgs e)
        {
            if (videoCapture.IsOpened() == false)
            {
                log_write("카메라 연결 안됨");
                timer1.Stop();
                return; // nothing to read from a closed capture
            }


            Mat frame_img_source_01 = new Mat();

            videoCapture.Read(frame_img_source_01);


            Mat searchFace = new Mat();
            Mat temp_image = new Mat();

            Rect[] faces = FaceDetect(frame_img_source_01);

            for (int faceCnt = 0; faceCnt < faces.Length; faceCnt++)
            {
                //  log_write("faces:" + Convert.ToString(faces[faceCnt]));
                if (faces[faceCnt].Width > 100)
                {
                    Cv2.Resize(frame_img_source_01[faces[faceCnt]], searchFace, new OpenCvSharp.Size(160, 160));
                    byte[] imageBytes = searchFace.ToBytes(".bmp");
                    searchFace = Mat.FromImageData(imageBytes, ImreadModes.Grayscale);

                    var predictedGroupId = 0;
                    if (model1 != null)
                    {
                        predictedGroupId = model1.Predict(searchFace);
                    }

                    if (predictedGroupId == 0)
                    {
                        log_write("predictedGroupId:" + Convert.ToString("확인불가"));
                        log_write("predictedGroupId:" + Convert.ToString(predictedGroupId));
                        Cv2.DestroyAllWindows();
                    }
                    else
                    {
                        log_write("predictedGroupId:" + Convert.ToString(predictedGroupId));
                        log_write("성명:" + Convert.ToString(Face_images_info.FirstOrDefault(w => w.ImageGroupId == predictedGroupId).person_name));

                        image_maatching(searchFace, Face_images_info.FirstOrDefault(w => w.ImageGroupId == predictedGroupId).Image);
                    }

                    Cv2.Rectangle(frame_img_source_01, faces[faceCnt], Scalar.YellowGreen, 2);
                }
            }

            Cv2.Flip(frame_img_source_01, frame_img_source_01, FlipMode.Y);
            pictureBox1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(frame_img_source_01);
        }
Code Example #15
        public Mat GetFrameAsMat()
        {
            if (_capture.IsOpened())
            {
                var image = new Mat();
                _capture.Read(image);
                return(image);
            }

            throw new InvalidOperationException("Camera not initialized");
        }
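GetFrameAsMat above throws an InvalidOperationException when the capture was never opened, so callers that cannot guarantee the camera state may want to wrap it. A minimal caller sketch (TryGetFrame is hypothetical, not part of the original example):

        // Hypothetical wrapper: returns null instead of throwing when the camera is not initialized.
        public Mat TryGetFrame()
        {
            try
            {
                return GetFrameAsMat();
            }
            catch (InvalidOperationException)
            {
                return null;
            }
        }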
Code Example #16
 private void btnOpen_Click(object sender, EventArgs e)
 {
     capture = new VideoCapture(0, VideoCaptureAPIs.DSHOW);
     if (!capture.IsOpened())
     {
         MessageBox.Show("打开摄像头失败");
         capture.Dispose();
         return;
     }
     backgroundWorker1.RunWorkerAsync();
 }
Code Example #17
        private void MainWindow_Loaded(object sender, System.Windows.RoutedEventArgs e)
        {
            capture.Open(0, VideoCaptureAPIs.ANY);
            if (!capture.IsOpened())
            {
                Close();
                return;
            }

            bkgWorker.RunWorkerAsync();
        }
Code Example #18
        public void Start()
        {
            capture.Open(videoUrl);
            if (!capture.IsOpened())
            {
                Console.WriteLine($"Failed to open camera from {videoUrl}!");
                return;
            }

            task.Start();
        }
Code Example #19
        // Display the video stream in real time
        private void toolStripLabel1_Click(object sender, EventArgs e)
        {
            //string rtsp = "rtsp://*****:*****@192.168.0.108:554/cam/realmonitor?channel=1&subtype=0";
            this.cap.Open(0);
            //this.cap.Open(rtsp);

            if (cap.IsOpened())
            {
                MessageBox.Show("打开摄像头成功...");
            }
            else
            {
                return;
            }



            while (stop)
            {
                if (stop)
                {
                    Mat image = new Mat();
                    this.cap.Read(image);

                    if (image.Empty())
                    {
                        stop = !stop;
                    }
                    else
                    {
                        int sleepTime = (int)Math.Round(1000 / this.cap.Fps);
                        //original video
                        //       this.picMatWithFlag.BackgroundImage = image.ToBitmap();


                        Cv2.WaitKey(sleepTime);

                        if (this.frameIndexLoop % 5 == 0)
                        {
                            frameClone = image.Clone();
                            //   Random rd = new Random();
                            //   int random = rd.Next();
                            //   Plate_SVM.SaveCharSample(frameClone, PlateCategory.黑, "C:/Users/Myself/Desktop/车牌识别训练资源库--a/", "_" + random);
                        }

                        image.Release();//release the Mat
                    }
                }

                this.picMatWithFlag.Refresh();

                toolStripComboBox1_TextChanged(sender, e);
            }
        }
Code Example #20
        public static async Task <int> FindStartTime(VideoCapture videoFile, bool release)
        {
            if (!videoFile.IsOpened())
            {
                throw new IOException("File could not be opened");
            }
            if ((videoFile.FrameHeight != 768) || (videoFile.FrameWidth != 1024))
            {
                throw new VideoSizeException();
            }

            return(await Task.Run(() =>
            {
                Mat rawFrame = new Mat();
                Mat grayFrame = new Mat();
                Mat patch = new Mat();
                Mat correlationResult = new Mat();
                bool success = videoFile.Read(rawFrame);
                double startTime = -1;
                while (success)
                {
                    Cv2.CvtColor(rawFrame, grayFrame, ColorConversionCodes.RGB2GRAY);
                    // binarize the patch in which the TTL indicator appears
                    patch = grayFrame[745, 763, 1000, 1018].GreaterThan(196);

                    Cv2.MatchTemplate(patch, TTLMarker, correlationResult, TemplateMatchModes.CCoeffNormed);
                    double corrVal = correlationResult.At <float>(0, 0);
                    if (corrVal > 0.9)
                    {
                        startTime = videoFile.PosMsec;
                        break;
                    }
                    else
                    {
                        Vec3b sumPatch = grayFrame[745, 762, 1000, 1018].Sum().ToVec3b();
                        if ((sumPatch.Item0 + sumPatch.Item1 + sumPatch.Item2) - 137 * 18 * 18 == 0)                         // the pixels are all 137
                        {
                            // previous frame is start
                            startTime = videoFile.PosMsec - 1000.0 / videoFile.Fps;
                            break;
                        }
                    }

                    success = videoFile.Read(rawFrame);
                }

                if (release)
                {
                    videoFile.Release();
                }

                return (int)startTime;
            }));
        }
Code Example #21
        public void WaitAnyWindows()
        {
            using var capture = new VideoCapture("_data/image/blob/shapes%d.png");
            Assert.True(capture.IsOpened());

            var ex = Assert.Throws <OpenCVException>(() =>
            {
                var result = VideoCapture.WaitAny(new[] { capture }, out var readyIndex, 0);
            });

            Assert.Equal("VideoCapture::waitAny() is supported by V4L backend only", ex.ErrMsg);
        }
Code Example #22
 /// <summary>
 /// Start streaming camera frames
 /// </summary>
 /// <param name="display">The main window's image control</param>
 /// <param name="mainWindow">The main window object, used to update the UI</param>
 public static void Start()
 {
     webcam_stop = false;
     //Open the camera
     webcam.Open(webcam_id);
     if (webcam.IsOpened())
     {
         Face.findFace_Timer.Start();
         Face.webcam_Is_Open_Or_Not = true;
         while (!webcam_stop)
         {
             //Refresh the display
             UI.UpdateDisplay(GetCameraImage());
             //Wait 30 ms
             Cv2.WaitKey(30);
         }
         //Update the display (clear it)
         UI.UpdateDisplay(null);
     }
     webcam.Release();
 }
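The loop above runs until the webcam_stop flag is set, after which the display is cleared and the camera released. A minimal counterpart sketch for stopping the stream (Stop is hypothetical, assuming webcam_stop is the shared flag shown above):

 /// <summary>
 /// Signal the streaming loop in Start() to exit
 /// </summary>
 public static void Stop()
 {
     // The while (!webcam_stop) loop in Start() observes this flag and exits
     webcam_stop = true;
 }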
Code Example #23
        public static void Main(string[] args)
        {
            // initiate camera detection
            state.IsMotionDetected = false;
            state.IsCameraRunning  = true;

            BackgroundWorker bw = new BackgroundWorker();

            bw.DoWork += (se, ev) =>
            {
                var frame   = new Mat();
                var capture = new VideoCapture(0);
                capture.Open(0);
                Console.WriteLine("Connecting to first webcam...");
                var motionDetector = GetDefaultMotionDetector();
                Console.WriteLine("Initialize motion detector...");
                int counter = 0;
                if (capture.IsOpened())
                {
                    while (state.IsCameraRunning)
                    {
                        if (counter > 30)
                        {
                            counter = 0;
                            motionDetector.Reset();
                            Console.WriteLine("Motion Detector Reset...");
                        }
                        counter++;
                        capture.Read(frame);
                        var image = BitmapConverter.ToBitmap(frame);
                        if (image == null)
                        {
                            break;
                        }
                        var motionLevel = motionDetector.ProcessFrame(image);

                        if (motionLevel > 0.50)
                        {
                            state.IsMotionDetected = true;
                        }
                        else
                        {
                            state.IsMotionDetected = false;
                        }
                        Console.WriteLine($"Motion Level: {motionLevel}");
                        Thread.Sleep(1000);
                    }
                }
            };
            bw.RunWorkerAsync();

            CreateHostBuilder(args).Build().Run();
        }
Code Example #24
        /// <summary>
        /// Initializes video capture for video files.
        /// </summary>
        /// <param name="file">Path to a video.</param>
        /// <returns>Return video file capture.</returns>
        private static VideoCapture InitializeVideoCapture(string file)
        {
            var capture = new VideoCapture(file);

            if (!capture.IsOpened())
            {
                Console.WriteLine("Unable to open video file {0}.", file);
                return(null);
            }

            return(capture);
        }
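Since InitializeVideoCapture returns null when the file cannot be opened, a caller has to check the result before reading frames. A minimal usage sketch (the PlayVideo method and the "video.mp4" path are illustrative, not part of the original project):

        // Hypothetical caller: read and display frames only if the capture opened successfully.
        private static void PlayVideo()
        {
            var capture = InitializeVideoCapture("video.mp4");
            if (capture == null)
                return;

            using var frame = new Mat();
            while (capture.Read(frame) && !frame.Empty())
            {
                Cv2.ImShow("video", frame);
                if (Cv2.WaitKey(30) == 27) // stop on Esc
                    break;
            }
            capture.Release();
            Cv2.DestroyAllWindows();
        }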
Code Example #25
        private async Task CaptureCamera(CancellationToken token)
        {
            if (capture == null)
            {
                capture = new VideoCapture(CaptureDevice.DShow, 1);
            }

            capture.Open(1);         //try to open 2nd webcam
            //await Task.Delay(10000);
            if (!capture.IsOpened()) //if it failes, open main
            {
                capture.Open(0);
            }

            if (capture.IsOpened())
            {
                while (!token.IsCancellationRequested)
                {
                    using MemoryStream memoryStream = capture.RetrieveMat().Flip(FlipMode.Y).ToMemoryStream();

                    await Application.Current.Dispatcher.InvokeAsync(() =>
                    {
                        var imageSource = new BitmapImage();

                        imageSource.BeginInit();
                        imageSource.CacheOption  = BitmapCacheOption.OnLoad;
                        imageSource.StreamSource = memoryStream;
                        imageSource.EndInit();

                        WebCamImage.Source = imageSource;
                    });

                    var bitmapImage = new Bitmap(memoryStream);

                    await ParseWebCamFrame(bitmapImage, token);
                }

                capture.Release();
            }
        }
Code Example #26
        private async Task captureThread()
        {
            var count = 0;

            try {
                var capture = new VideoCapture(deviceId);
                while (!cancelToken.IsCancellationRequested)
                {
                    if (capture.IsOpened())
                    {
                        Bitmap bitmap      = null;
                        var    normalFrame = new Mat();
                        capture.Read(normalFrame);

                        try {
                            bitmap = Effector?.ToBitmap(normalFrame);
                        }
                        catch (Exception e) {
                            error = e.Message;
                        }

                        if (bitmap == null)
                        {
                            bitmap = BitmapConverter.ToBitmap(normalFrame);
                        }
                        normalFrame.Dispose();

                        if (count == 0)
                        {
                            count++;
                            BeginInvoke(new MethodInvoker(() => {
                                if (this.bitmap != null)
                                {
                                    this.bitmap.Dispose();
                                }

                                this.bitmap = bitmap;
                                UpdateImage();
                                count--;
                            }));
                        }
                    }

                    await Task.Delay(1);
                }

                capture.Release();
            }
            catch (Exception e) {
                error = e.Message;
            }
        }
Code Example #27
File: Camera.cs Project: mmcs-robotics/mapping_2015
        public Camera()
        {
            _capture = Capture();
            while (!_capture.IsOpened())
            {
                Logger.Warn("Не могу подключиться к камере");
                Thread.Sleep(1000);
                _capture = Capture();
            }

            Logger.Success(String.Format("Подключился к камере ({0}x{1})",
                _capture.FrameWidth, _capture.FrameHeight));
        }
Code Example #28
        public void DefineVideoSource(string sourcePath)
        {
            sourceVideoPath = sourcePath;
            videoCapture    = new VideoCapture(sourcePath);

            IsOpened = videoCapture.IsOpened();
            if (IsOpened)
            {
                MaxFrameCount     = videoCapture.FrameCount;
                CurrentFrameCount = 0;
                Fps = videoCapture.Fps;
            }
        }
Code Example #29
        private void BTN_Conn_Cam_Click(object sender, EventArgs e)
        {
            try
            {
                if (cbCam.Text == "")
                {
                    MessageBox.Show("Please select an available cam", "System Message");
                }
                else
                {
                    try
                    {
                        capture.Open(Convert.ToInt32(cbCam.Text));
                        if (capture.IsOpened())
                        {
                            btnStart.Enabled        = true;
                            TB_CAM.Text             = cbCam.Text;
                            BTN_Conn_Cam.Enabled    = false;
                            BTN_Disconn_Cam.Enabled = true;

                            cbCam.Enabled = false;

                            PB_Cam_OFF.Visible = false;
                            PB_Cam_ON.Visible  = true;

                            MessageBox.Show("CAM Open", "System Message");
                        }
                    }
                    catch (System.IO.IOException ex)
                    {
                        MessageBox.Show("Error: " + ex.ToString(), "ERROR");
                    }
                }
            }
            catch (UnauthorizedAccessException)
            {
                MessageBox.Show("Unauthorized access!");
            }
        }
Code Example #30
        private void VideoCaptureForm_Load(object sender, EventArgs e)
        {
            capture.Open(0, VideoCaptureAPIs.ANY);
            if (!capture.IsOpened())
            {
                Close();
                return;
            }

            ClientSize = new System.Drawing.Size(capture.FrameWidth, capture.FrameHeight);

            backgroundWorker1.RunWorkerAsync();
        }
Code Example #31
        /// <summary>
        /// Initialize web cam capture.
        /// </summary>
        /// <returns>Returns web cam capture.</returns>
        private static VideoCapture InitializeCapture(int cameraIndex = 0)
        {
            VideoCapture capture = new VideoCapture();

            capture.Open(CaptureDevice.MSMF, cameraIndex);

            if (!capture.IsOpened())
            {
                Console.WriteLine("Unable to open capture.");
                return(null);
            }

            return(capture);
        }
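InitializeCapture above also returns null on failure, so the same guard applies. A minimal usage sketch that grabs a single frame (SaveSnapshot and the output file name are illustrative, not part of the original project):

        // Hypothetical caller: grab one frame from the helper above and save it to disk.
        private static void SaveSnapshot()
        {
            using var capture = InitializeCapture();
            if (capture == null)
                return;

            using var frame = new Mat();
            if (capture.Read(frame) && !frame.Empty())
            {
                frame.SaveImage("snapshot.png");
            }
        }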
Code Example #32
File: Mainform.cs Project: gwangmin68/GitHubSfashfa
        private void frame_tick_Tick(object sender, EventArgs e)
        {
            if (isLoad)
            {
                sfashfa_start();
            }
            if (capture != null && capture.IsOpened())
            {
                capture.Read(frame);
                frame = Opencv_lib.rotateImage(frame, 90);
                frame = Opencv_lib.findupperbody(frame, colormode);

                opencv_viewer.ImageIpl = frame;
            }
        }
Code Example #33
File: Form1.cs Project: YYMofROK/opencvSample_002
        private void Timer1_Tick(object sender, EventArgs e)
        {
            if (videoCapture.IsOpened() == false)
            {
                log_write("카메라 연결 안됨");
                return; // nothing to read from a closed capture
            }

            Mat frame_img_source_01 = new Mat();

            videoCapture.Read(frame_img_source_01);

            pictureBox1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(frame_img_source_01);

            // 그레이 스케일
            Mat frame_img_source_02 = new Mat();

            videoCapture.Read(frame_img_source_02);
            Mat frame_img_gray = new Mat();

            byte[] imageBytes = frame_img_source_02.ToBytes(".bmp");
            //byte[] imageBytes = frame_img_FlipY.ToBytes(".bmp");
            frame_img_gray    = Mat.FromImageData(imageBytes, ImreadModes.Grayscale);
            pictureBox1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(frame_img_gray);

            Cv2.EqualizeHist(frame_img_gray, frame_img_gray);
            var cascade = new CascadeClassifier("C://Users//dev-yym//source//repos//opencv_002//FaceML_Data//haarcascade_frontalface_alt.xml");
            //var nestedCascade = new CascadeClassifier("C://Users//dev-yym//source//repos//opencv_002//FaceML_Data//haarcascade_eye_tree_eyeglasses.xml");


            var faces = cascade.DetectMultiScale(
                image: frame_img_gray,
                scaleFactor: 1.1,
                minNeighbors: 2,
                flags: HaarDetectionType.DoRoughSearch | HaarDetectionType.ScaleImage,
                minSize: new OpenCvSharp.Size(30, 30)
                );

            log_write("Detected faces:" + Convert.ToString(faces.Length));


            for (int faceCnt = 0; faceCnt < faces.Length; faceCnt++)
            {
                log_write("faces:" + Convert.ToString(faces[faceCnt]));

                Cv2.Rectangle(frame_img_source_01, faces[faceCnt], Scalar.YellowGreen, 2);
            }
            pictureBox1.Image = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(frame_img_source_01);
        }
Code Example #34
File: Program.cs Project: rayrrrr/opencvsharp
        private static void VideoCaptureSample()
        {
            var cap = new VideoCapture(0);

            if (!cap.IsOpened())
            {
                Console.WriteLine("Can't use camera.");
                return;
            }

            var frame = new Mat();

            using (var window = new Window("window"))
            {
                while (true)
                {
                    cap.Read(frame);
                    window.ShowImage(frame);
                    int key = Cv2.WaitKey(50);
                    if (key == 'b')
                        break;
                }
            }
        }
Code Example #35
File: OpenCv.cs Project: Muraad/DynamoOpenCV
        public static List<Mat> PreviewCamera(bool savePictureMode)
        {
            // Opens a camera device
            var capture = new VideoCapture(0);
            if (!capture.IsOpened())
                return null;
            var returnedMat = new List<Mat>();

            var frame = new Mat();
            var returnedframe = new Mat();

            while (true)
            {
                // Read image
                capture.Read(frame);
                if (frame.Empty())
                    return null;

                Cv2.ImShow("Camera", frame);
                var key = Cv2.WaitKey(30);

                if (key == 27) //wait for esc key
                {
                    Cv2.DestroyAllWindows();
                    return returnedMat;
                }
                if (key == 115) //wait for s
                {
                    returnedframe = frame.Clone();
                    returnedMat.Add(returnedframe);
                }
            }
        }