Open() public method

Allocates and initializes the CvCapture structure for reading a video stream from a camera. Currently, two camera interfaces can be used on Windows: Video for Windows (VFW) and the Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE 1394).
public Open ( CaptureDevice device ) : void
device CaptureDevice Device type
return void
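
The project samples below all bind the capture with the index-based Open(int) overload; as a quick orientation, here is a minimal sketch of the documented Open(CaptureDevice) overload itself. It assumes an OpenCvSharp build that still ships the CaptureDevice enum (CaptureDevice.Any is assumed to select the default backend/device) and the usual using System; / using OpenCvSharp; directives; on versions without the enum, capture.Open(0) is the equivalent call.

        public void OpenDefaultCamera()
        {
            // Allocate an unbound VideoCapture, then attach it to a camera device.
            using (var capture = new VideoCapture())
            using (var frame = new Mat())
            {
                // CaptureDevice.Any (assumed enum member) lets OpenCV pick the first available camera;
                // the examples below pass a plain integer index instead.
                capture.Open(CaptureDevice.Any);
                if (!capture.IsOpened())
                    throw new Exception("camera initialization failed");

                // Grab a single frame to confirm the stream is live, then save it to disk.
                capture.Read(frame);
                if (!frame.Empty())
                    Cv2.ImWrite("first_frame.png", frame);
            }
        }

Checking IsOpened() after Open() is the pattern every sample below follows: a failed open typically just leaves the capture unopened rather than throwing.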
Code Example #1
        public void Run()
        {
            var capture = new VideoCapture();
            capture.Set(CaptureProperty.FrameWidth, 640);
            capture.Set(CaptureProperty.FrameHeight, 480);
            capture.Open(-1);                           // -1 lets OpenCV pick the first available camera
            if (!capture.IsOpened())
                throw new Exception("capture initialization failed");

            // feed the same camera into a BTV-L1 (bilateral total variation) super-resolution pipeline
            var fs = FrameSource.CreateCameraSource(-1);
            var sr = SuperResolution.CreateBTVL1();
            sr.SetInput(fs);

            using (var normalWindow = new Window("normal"))
            using (var srWindow = new Window("super resolution"))
            {
                var normalFrame = new Mat();
                var srFrame = new Mat();
                while (true)
                {
                    capture.Read(normalFrame);
                    sr.NextFrame(srFrame);
                    if (normalFrame.Empty() || srFrame.Empty())
                        break;
                    normalWindow.ShowImage(normalFrame);
                    srWindow.ShowImage(srFrame);
                    Cv2.WaitKey(100);
                }
            }
        }
Code Example #2
        static void Main(string[] args)
        {
            //size of tag is in meters and camera parameters are obtained from calibration
            float  tag_size = 0.1F;
            float  fx       = 1200F;
            float  fy       = 1400F;
            double px       = 817.143;
            double py       = 387.159;

            //array of floats to carry values of image points (x and y * 4 points)
            float[] ptsry = new float[8];

            //initialize video capture from camera
            var capt = new OpenCvSharp.VideoCapture();

            capt.Open(0);

            //window for displaying video
            Window window = new Window("capture");

            //main task; display video and find tags
            using (Mat frame = new Mat())
            {
                // loop until a key is pressed
                while (true)
                {
                    //read from camera and show it
                    capt.Read(frame);
                    window.ShowImage(frame);

                    //detect tags and find how many and print the number
                    Apriltag ap           = new Apriltag("canny", true, "tag16h5");
                    var      current_tags = ap.detect(frame);
                    Console.WriteLine("Number of tags = " + current_tags.Count);
                    Console.WriteLine();

                    //sleep for 10 msec
                    System.Threading.Thread.Sleep(10);

                    //if a key is pressed, close the window and exit
                    if (Cv2.WaitKey(1) >= 0)
                    {
                        capt.Release();
                        Cv2.DestroyAllWindows();
                        break;
                    }
                }
            }
        }
Code Example #3
        private async Task CaptureCamera(CancellationToken token)
        {
            if (capture == null)
            {
                capture = new cv.VideoCapture(0, cv.VideoCaptureAPIs.ANY);
            }

            capture.Open(0);

            if (capture.IsOpened())
            {
                try
                {
                    while (!token.IsCancellationRequested)
                    {
                        using MemoryStream memoryStream = capture.RetrieveMat().ToMemoryStream();

                        await Application.Current.Dispatcher.InvokeAsync(() =>
                        {
                            try
                            {
                                var imageSource = new BitmapImage();

                                imageSource.BeginInit();
                                imageSource.CacheOption  = BitmapCacheOption.OnLoad;
                                imageSource.StreamSource = memoryStream;
                                imageSource.EndInit();

                                img_WebCam.Source = imageSource;
                            }
                            catch (Exception e)
                            {
                            }
                        });

                        memoryStream.Position = 0;      // rewind: BitmapImage consumed the stream above
                        bitmapImage = new Bitmap(memoryStream);


                        //await ParseWebCamFrame(bitmapImage, token);
                    }

                    capture.Release();
                }
                catch (Exception e)
                {
                }
            }
        }
Code Example #4
        private async Task CaptureCamera(CancellationToken token)
        {
            if (capture == null)
            {
                capture = new OpenCvSharp.VideoCapture(CaptureDevice.DShow);
            }

            capture.Open(0);

            //  m_capture.Start();

            if (capture.IsOpened())
            //  if(m_capture.IsOpened)
            {
                while (!token.IsCancellationRequested)
                {
                    using MemoryStream memoryStream = capture.RetrieveMat().Flip(FlipMode.Y).ToMemoryStream();
                    //  using MemoryStream memoryStream = m_capture.QueryFrame()..RetrieveMat().Flip(FlipMode.Y).ToMemoryStream();


                    await Application.Current.Dispatcher.InvokeAsync(() =>
                    {
                        var imageSource = new BitmapImage();

                        imageSource.BeginInit();
                        imageSource.CacheOption  = BitmapCacheOption.OnLoad;
                        imageSource.StreamSource = memoryStream;
                        imageSource.EndInit();

                        OpenCVSharpImageSource.Source = imageSource;
                    });

                    memoryStream.Position = 0;          // rewind: BitmapImage consumed the stream above
                    var bitmapImage = new Bitmap(memoryStream);

                    await ParseWebCamFrame(bitmapImage, token);
                }

                capture.Release();
            }
        }
Code Example #5
        static void Main(string[] args)
        {
            //var vc = new OpenCvSharp.VideoCapture("./test/india.mp4");
            //var vc = new OpenCvSharp.VideoCapture("./test/Test.mov");
            //var vc = new OpenCvSharp.VideoCapture("./test/singleTest.m4v");

            //var vc = new OpenCvSharp.VideoCapture("./test/peopleTest.m4v");
            var vc = new OpenCvSharp.VideoCapture();

            vc.Open(1);                                 // camera index 1 (second camera); the commented lines above read from files instead
            ImageRecognizer imageRecognizer = new ImageRecognizer();
            int             key             = int.MinValue;

            using (Window window = new Window("capture"))
            {
                while (key < 0)
                {
                    vc.Grab();
                    var mat = vc.RetrieveMat();
                    if (mat.Empty())
                    {
                        return;
                    }

                    var faces = imageRecognizer.DetectFaces(mat);
                    if (faces != null)
                    {
                        foreach (var face in faces)
                        {
                            var faceCrop = new Mat(mat, face);
                            faceCrop.SaveImage($"./results/{Guid.NewGuid()}.jpg");
                        }
                    }

                    window.ShowImage(mat);
                    key = Cv2.WaitKey(10);
                }
            }
        }
Code Example #6
        static void Main(string[] args)
        {
            var faceengine = new FaceEngine(ASF_RegisterOrNot.ASF_REGISTER, 2);

            //faceengine.OnlineActivation("", "", "");
            FaceEngine.OnlineActivationFree("", "");
            //faceengine.OfflineActivation();


            faceengine.InitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_0_ONLY, 1, ASF_Mask.ASF_FACE_DETECT | ASF_Mask.ASF_FACERECOGNITION | ASF_Mask.ASF_AGE | ASF_Mask.ASF_LIVENESS);
            Console.WriteLine(faceengine.Version.BuildDate);
            Console.WriteLine(faceengine.Version.CopyRight);
            Console.WriteLine(faceengine.Version.Version);
            // open the default camera (index 0) for live face detection
            OpenCvSharp.VideoCapture videoCapture = new OpenCvSharp.VideoCapture();
            videoCapture.Open(0);

            var activeFile = FaceEngine.GetActiveFileInfo();


            ////Console.WriteLine(FaceEngine.GetActiveDeviceInfo());
            Stopwatch stopwatch = new Stopwatch();

            stopwatch.Restart();
            //faceengine.InitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_ALL_OUT, 9,
            //    ASF_Mask.ASF_AGE | ASF_Mask.ASF_FACE3DANGLE | ASF_Mask.ASF_FACELANDMARK | ASF_Mask.ASF_FACERECOGNITION | ASF_Mask.ASF_FACESHELTER | ASF_Mask.ASF_FACE_DETECT |
            //     ASF_Mask.ASF_GENDER | ASF_Mask.ASF_IMAGEQUALITY | ASF_Mask.ASF_IR_LIVENESS | ASF_Mask.ASF_LIVENESS | ASF_Mask.ASF_MASKDETECT | ASF_Mask.ASF_UPDATE_FACEDATA);
            //Console.WriteLine($"Engine initialization: {stopwatch.ElapsedMilliseconds}ms");
            Mat mat = new Mat();

            //Mat mat = new Mat(@"C:\Users\Jch\Desktop\2.jpg");
            while (true)
            {
                stopwatch.Restart();

                if (videoCapture.Read(mat))
                {
                    using (var img = mat.ToBitmap())
                        using (var imgInfo = ImageInfo.ReadBMP(img))
                        {
                            Console.WriteLine($"Image processing: {stopwatch.ElapsedMilliseconds}ms");
                            stopwatch.Restart();
                            var detectResult = faceengine.DetectFacesEx(imgInfo);
                            Console.WriteLine($"Face detection: {stopwatch.ElapsedMilliseconds}ms");
                            if (detectResult != null)
                            {
                                foreach (var item in detectResult.FaceInfos)
                                {
                                    Console.WriteLine($"Age: {item.Age}");
                                    Console.WriteLine($"FaceID: {item.FaceID}");
                                    Console.WriteLine($"FaceOrient: {item.FaceOrient}");
                                    Console.WriteLine($"FaceShelter: {item.FaceShelter}");
                                    Console.WriteLine($"Gender: {item.Gender}");
                                    Console.WriteLine($"LeftEyeClosed: {item.LeftEyeClosed}");
                                    Console.WriteLine($"Liveness: {item.Liveness}");
                                    Console.WriteLine($"Mask: {item.Mask}");
                                    Console.WriteLine($"RightEyeClosed: {item.RightEyeClosed}");
                                    Console.WriteLine($"WearGlasses: {item.WearGlasses}");
                                    Console.WriteLine($"FaceRect: bottom->{item.FaceRect.bottom} left->{item.FaceRect.left} right->{item.FaceRect.right} top->{item.FaceRect.top}");
                                    Console.WriteLine($"FaceLandmark: x->{item.FaceLandmark.x} y->{item.FaceLandmark.y}");
                                    Console.WriteLine($"Face3DAngle: {item.Face3DAngle.roll} {item.Face3DAngle.yaw} {item.Face3DAngle.pitch} {item.Face3DAngle.status}");
                                    stopwatch.Restart();
                                    var feature = faceengine.FaceFeatureExtractEx(imgInfo, item);
                                    Console.WriteLine($"Feature extraction: {stopwatch.ElapsedMilliseconds}ms");
                                    if (feature != null)
                                    {
                                        Console.WriteLine($"feature: {feature.Size}");
                                    }
                                    Console.WriteLine(faceengine.FaceFeatureCompare(feature.ASFFaceFeature, feature.ASFFaceFeature));
                                    var score = faceengine.ImageQualityDetectEx(imgInfo, item);
                                    Console.WriteLine($"Face quality: {score}");
                                    Console.WriteLine("--------------------------------------------");
                                }
                            }
                        }
                }
            }

            Console.ReadLine();
        }