Video capturing class
Inheritance: OpenCvSharp.DisposableCvObject
コード例 #1
6
        /// <summary>
        /// Captures frames from the default camera and shows the raw stream next to
        /// a BTV-L1 super-resolution stream side by side until either source runs dry.
        /// </summary>
        public void Run()
        {
            // Fix: dispose the capture and frame buffers — the original leaked
            // native memory held by VideoCapture and both Mats.
            using (var capture = new VideoCapture())
            {
                capture.Set(CaptureProperty.FrameWidth, 640);
                capture.Set(CaptureProperty.FrameHeight, 480);
                capture.Open(-1); // -1 = first available camera
                if (!capture.IsOpened())
                    throw new Exception("capture initialization failed");

                // NOTE(review): this opens the same camera a second time via FrameSource;
                // some backends refuse two concurrent handles — confirm on target hardware.
                var fs = FrameSource.CreateCameraSource(-1);
                var sr = SuperResolution.CreateBTVL1();
                sr.SetInput(fs);

                using (var normalWindow = new Window("normal"))
                using (var srWindow = new Window("super resolution"))
                using (var normalFrame = new Mat())
                using (var srFrame = new Mat())
                {
                    while (true)
                    {
                        capture.Read(normalFrame);
                        sr.NextFrame(srFrame);
                        // Stop when either source delivers no more data.
                        if (normalFrame.Empty() || srFrame.Empty())
                            break;
                        normalWindow.ShowImage(normalFrame);
                        srWindow.ShowImage(srFrame);
                        Cv2.WaitKey(100);
                    }
                }
            }
        }
コード例 #2
1
        /// <summary>
        /// Probes consecutive camera indices starting at 0 and records every id that
        /// can be opened and deliver a frame. Id -1 ("any camera") is always listed.
        /// Probing stops at the first index that fails to open, fails to read, or throws.
        /// </summary>
        private void EnumSources()
        {
            this.Properties.CameraSourceIds.Add(-1);

            for (int candidate = 0; ; candidate++)
            {
                try
                {
                    using (Mat probeFrame = new Mat())
                    using (var probe = new OpenCvSharp.VideoCapture(OpenCvSharp.CaptureDevice.Any, candidate))
                    {
                        // A device counts only if it opens AND yields at least one frame.
                        if (!probe.IsOpened() || !probe.Read(probeFrame))
                        {
                            return;
                        }
                        this.Properties.CameraSourceIds.Add(candidate);
                    }
                }
                catch
                {
                    // Some backends throw instead of reporting failure;
                    // treat that as "no more cameras".
                    return;
                }
            }
        }
コード例 #3
1
ファイル: FileManager.cs プロジェクト: 0V/Face-Exchanger
        /// <summary>
        /// Grabs a single frame from the specified camera.
        /// </summary>
        /// <param name="cameraIndex">Zero-based index of the camera device to read from.</param>
        /// <returns>The captured frame; an empty Mat when the camera could not deliver one.</returns>
        public static Mat GetCameraImage(int cameraIndex = 0)
        {
            var frame = new Mat();
            // Fix: the original ignored cameraIndex and always opened device 0.
            using (var capture = new VideoCapture(cameraIndex))
                capture.Read(frame);

            return frame;
        }
コード例 #4
0
    /// <summary>
    /// Opens a video source: a stream/file URL when the argument contains any
    /// non-digit character, otherwise a webcam selected by numeric id.
    /// Warms the camera up by discarding a few frames, then sanity-checks
    /// the first two real frames (non-empty, equal size, 8-bit BGR).
    /// </summary>
    /// <param name="cam_or_url">Webcam id ("0", "1", ...) or a stream/file URL.</param>
    public OpencvSource(string cam_or_url)
    {
        MAssert.Check(cam_or_url != string.Empty);

        // Treat the argument as a stream URL if it contains any non-digit character.
        bool stream = false;

        for (int i = 0; i < cam_or_url.Length; ++i)
        {
            stream = stream ||
                     (cam_or_url[i] < '0') ||
                     (cam_or_url[i] > '9');
        }

        if (stream)
        {
            // open stream
            Console.WriteLine("opening stream '{0}'", cam_or_url);
            capturer = new OpenCvSharp.VideoCapture(cam_or_url);
        }
        else
        {
            // convert to integer
            int cam_id = Convert.ToInt32(cam_or_url, 10);
            MAssert.Check(cam_id >= 0, "wrong webcam id");

            // open webcam
            Console.WriteLine("opening webcam {0}", cam_id);
            capturer = new OpenCvSharp.VideoCapture(cam_id);
            MAssert.Check(capturer.IsOpened(), "webcam not opened");

            // set resolution
            capturer.Set(OpenCvSharp.CaptureProperty.FrameWidth, 1280);
            capturer.Set(OpenCvSharp.CaptureProperty.FrameHeight, 720);

            MAssert.Check(capturer.IsOpened(), "webcam not opened");
        }

        // sometimes the first few frames can be empty even if the camera is good,
        // so skip a few frames.
        // Fix: RetrieveMat() allocates a new Mat each call — dispose each
        // throwaway frame instead of leaking ten native buffers.
        for (int i = 0; i < 10; ++i)
        {
            using (OpenCvSharp.Mat skipped = capturer.RetrieveMat())
            {
            }
        }

        // check first two frames
        OpenCvSharp.Mat image1 = new OpenCvSharp.Mat(), image2 = new OpenCvSharp.Mat();
        capturer.Read(image1);
        capturer.Read(image2);
        Console.WriteLine("image1 size: {0}", image1.Size());
        // Fix: the second line logged image2's size under the "image1" label.
        Console.WriteLine("image2 size: {0}", image2.Size());

        MAssert.Check(
            !image1.Empty() &&
            !image2.Empty() &&
            image1.Size() == image2.Size() &&
            image1.Type() == OpenCvSharp.MatType.CV_8UC3 &&
            image2.Type() == OpenCvSharp.MatType.CV_8UC3,
            "error opening webcam or stream");
    }
コード例 #5
0
ファイル: Fetcher.cs プロジェクト: fel88/SOBA
        /// <summary>
        /// Lets the user pick a video file and (re)opens the capture source from it,
        /// reflecting the chosen path in the window title.
        /// </summary>
        private void toolStripButton1_Click(object sender, EventArgs e)
        {
            // OpenFileDialog is IDisposable — release its native handle when done.
            using (OpenFileDialog ofd = new OpenFileDialog())
            {
                if (ofd.ShowDialog() != DialogResult.OK)
                {
                    return;
                }
                // Fix: release any previously opened capture before replacing it,
                // otherwise each file selection leaked a native VideoCapture.
                cap?.Dispose();
                cap  = new OpenCvSharp.VideoCapture(ofd.FileName);
                Text = $"Source: {ofd.FileName}";
            }
        }
コード例 #6
0
        /// <summary>
        /// Shows live video from camera 0 and prints the number of AprilTags
        /// (tag16h5 family) detected in each frame, until any key is pressed.
        /// </summary>
        static void Main(string[] args)
        {
            // Size of tag is in meters; camera intrinsics come from calibration.
            // NOTE(review): these values and ptsry are declared but not used below —
            // presumably intended for pose estimation; confirm before deleting.
            float  tag_size = 0.1F;
            float  fx       = 1200F;
            float  fy       = 1400F;
            double px       = 817.143;
            double py       = 387.159;

            // array of floats to carry values of image points (x and y * 4 points)
            float[] ptsry = new float[8];

            // initialize video capture from camera
            var capt = new OpenCvSharp.VideoCapture();
            capt.Open(0);

            // Fix: the detector is loop-invariant — the original constructed a new
            // Apriltag instance on every frame.
            Apriltag ap = new Apriltag("canny", true, "tag16h5");

            // window for displaying video; Mat is the reusable frame buffer
            using (Window window = new Window("capture"))
            using (Mat frame = new Mat())
            {
                // loop until a key is pressed
                while (true)
                {
                    // read from camera and show it
                    capt.Read(frame);
                    window.ShowImage(frame);

                    // detect tags and print how many were found
                    var current_tags = ap.detect(frame);
                    Console.WriteLine("Number of tags = " + current_tags.Count);
                    Console.WriteLine();

                    // sleep for 10 msec
                    System.Threading.Thread.Sleep(10);

                    // if a key is pressed, close the window and exit
                    if (Cv2.WaitKey(1) >= 0)
                    {
                        capt.Release();
                        Cv2.DestroyAllWindows();
                        break;
                    }
                }
            }
        }
        /// <inheritdoc/>
        public async Task StartAsync(int delay = 30)
        {
            OpenCvSharp.VideoCapture capture;

            // Select the capture source from the configured video type.
            switch (this.settings.VideoType)
            {
            case VideoType.Webcam:
                capture = new OpenCvSharp.VideoCapture(this.settings.WebcamIndex);
                break;

            case VideoType.Url:
                capture = new OpenCvSharp.VideoCapture(this.settings.Url);
                break;

            default:
                throw new ArgumentException("Invalid settings: video type for grabber not specified or wrong.");
            }

            // Fix: dispose the capture when the loop ends — the original leaked it.
            using (capture)
            using (Mat image = new Mat())
            {
                while (true)
                {
                    capture.Read(image);

                    if (image.Empty())
                    {
                        break; // end of stream / camera gone
                    }

                    // Downscale a copy for analysis, preserving the aspect ratio.
                    int analysisWidth  = 320;
                    int analysisHeight = analysisWidth * image.Height / image.Width;

                    // Fix: Resize() already returns a new Mat, so the original's extra
                    // Clone() leaked one Mat per frame; the analysis image itself was
                    // never disposed either.
                    using (var analysisImage = image.Resize(new Size(analysisWidth, analysisHeight)))
                    using (var analysisStream = analysisImage.ToMemoryStream(".jpg", new ImageEncodingParam(ImwriteFlags.JpegQuality, 50)))
                    using (var displayStream = image.ToMemoryStream(".jpg", new ImageEncodingParam(ImwriteFlags.JpegQuality, 100)))
                    {
                        this.OnFrameGrabbed?.Invoke(
                            displayStream,
                            analysisStream,
                            "image/jpeg",
                            analysisImage.Width,
                            analysisImage.Height);
                    }

                    await Task.Delay(delay);
                }
            }
        }
コード例 #8
0
        /// <summary>
        /// Releases the capture device and the frame buffer under the sync lock.
        /// Safe to call repeatedly or when nothing was opened.
        /// </summary>
        void CloseCamera()
        {
            lock (m_sync)
            {
                if (m_videoCapture != null)
                {
                    m_videoCapture.Dispose();
                }
                m_videoCapture = null;

                if (m_image != null)
                {
                    m_image.Dispose();
                }
                // Fix: also clear the field — the original left a disposed Mat
                // reachable, unlike the m_videoCapture handling above.
                m_image = null;
            }
        }
コード例 #9
0
        /// <summary>
        /// Continuously grabs frames from camera 0 and displays them in img_WebCam
        /// on the UI thread until <paramref name="token"/> is cancelled.
        /// </summary>
        private async Task CaptureCamera(CancellationToken token)
        {
            if (capture == null)
            {
                capture = new cv.VideoCapture(0, cv.VideoCaptureAPIs.ANY);
            }

            capture.Open(0);

            if (capture.IsOpened())
            {
                try
                {
                    while (!token.IsCancellationRequested)
                    {
                        // Fix: RetrieveMat() allocates a new Mat each call — the
                        // original leaked one native buffer per frame.
                        using cv.Mat frame = capture.RetrieveMat();
                        using MemoryStream memoryStream = frame.ToMemoryStream();

                        await Application.Current.Dispatcher.InvokeAsync(() =>
                        {
                            try
                            {
                                var imageSource = new BitmapImage();

                                imageSource.BeginInit();
                                imageSource.CacheOption  = BitmapCacheOption.OnLoad;
                                imageSource.StreamSource = memoryStream;
                                imageSource.EndInit();

                                img_WebCam.Source = imageSource;
                            }
                            catch (Exception)
                            {
                                // Best-effort UI update: skip a frame that fails to decode.
                                // NOTE(review): consider logging instead of discarding.
                            }
                        });

                        bitmapImage = new Bitmap(memoryStream);

                        //await ParseWebCamFrame(bitmapImage, token);
                    }
                }
                catch (Exception)
                {
                    // Keep the background grabber from crashing the app.
                    // NOTE(review): consider logging here instead of swallowing silently.
                }
                finally
                {
                    // Fix: the original only released the device when the loop
                    // exited normally, not when a frame read threw.
                    capture.Release();
                }
            }
        }
コード例 #10
0
        /// <summary>
        /// Demo action: pulls frames from the first capture device while running
        /// Haar-cascade face detection on a static test image ("faces.png").
        /// NOTE(review): the detector runs on "faces.png", not on the captured frame —
        /// this looks like leftover sample wiring; confirm intent before reusing.
        /// </summary>
        public IActionResult Canny()
        {
            using (var capture = new OpenCvSharp.VideoCapture(CaptureDevice.Any, index: 0))
            {
                var fps = GetFps(capture);
                capture.Fps = fps;

                // Fix: the cascade and the target image are loop-invariant — the
                // original re-read both files from disk on every single frame and
                // never disposed them.
                using (var haarCascade = new CascadeClassifier("haarcascade_frontalface_alt2.xml"))
                using (var gray = new Mat("faces.png", ImreadModes.GrayScale))
                using (Mat image = new Mat()) // Frame image buffer
                {
                    // When the movie playback reaches end, Mat.data becomes NULL.
                    while (true)
                    {
                        capture.Read(image);

                        if (image.Empty())
                        {
                            break;
                        }

                        // Detect faces
                        Rect[] faces = haarCascade.DetectMultiScale(gray, 1.08, 2, HaarDetectionType.ScaleImage, new Size(30, 30));

                        //byte[] cannyBytes = cannyImage.ToBytes(".png");
                        //string base64 = Convert.ToBase64String(cannyBytes);
                        //// store into the view variable
                        //ViewBag.Base64Image = base64;

                        //window.ShowImage(image);
                        //Cv2.WaitKey(sleepTime);
                    }
                }
            }

            return(View());
        }
コード例 #11
0
ファイル: faces.cs プロジェクト: LeeChanHyuk/Unity_Project
    /// <summary>
    /// Unity Start(): caches the "Camera" GameObject, builds a 17-entry color
    /// palette, loads three FSA-Net head-pose TensorFlow models, starts a
    /// RealSense depth stream (640x360 @ 30fps) and opens webcam 0 at 640x360.
    /// NOTE(review): all file paths are hard-coded to one developer's machine —
    /// this only runs there unless the paths are made configurable.
    /// </summary>
    void Start()
    {
        gameObject    = GameObject.Find("Camera");
        colors        = new List <Scalar>();
        // Reference image showing the expected face position.
        face_position = Cv2.ImRead("C:/Users/CheNik/Desktop/Face_position.png");
        // Color palette (Scalar is BGR in OpenCV) used for per-face drawing.
        colors.Add(new Scalar(0, 0, 100));
        colors.Add(new Scalar(0, 100, 0));
        colors.Add(new Scalar(0, 100, 100));
        colors.Add(new Scalar(100, 0, 0));
        colors.Add(new Scalar(100, 0, 100));
        colors.Add(new Scalar(100, 100, 0));
        colors.Add(new Scalar(100, 100, 100));
        colors.Add(new Scalar(0, 0, 150));
        colors.Add(new Scalar(0, 150, 0));
        colors.Add(new Scalar(0, 150, 150));
        colors.Add(new Scalar(150, 0, 0));
        colors.Add(new Scalar(150, 0, 150));
        colors.Add(new Scalar(150, 150, 0));
        colors.Add(new Scalar(150, 150, 150));
        colors.Add(new Scalar(0, 0, 180));
        colors.Add(new Scalar(0, 180, 0));
        colors.Add(new Scalar(0, 180, 180));
        v3 = new Vector3(0, -5, 2.4f);
        pixxy.transform.position = new Vector3(0f, 0f, 0f);
        importantImage           = new Mat();

        // Three FSA-Net variants (capsule / noS / var); presumably their
        // predictions are combined later — TODO confirm in the update loop.
        graph1 = new TFGraph();
        graph2 = new TFGraph();
        graph3 = new TFGraph();
        graph1.Import(File.ReadAllBytes("C:/Users/CheNik/Downloads/FSA-Net-master/FSA-Net-master/pre-trained/converted-models/tf/fsanet_capsule_3_16_2_21_5.pb"));
        graph2.Import(File.ReadAllBytes("C:/Users/CheNik/Downloads/FSA-Net-master/FSA-Net-master/pre-trained/converted-models/tf/fsanet_noS_capsule_3_16_2_192_5.pb"));
        graph3.Import(File.ReadAllBytes("C:/Users/CheNik/Downloads/FSA-Net-master/FSA-Net-master/pre-trained/converted-models/tf/fsanet_var_capsule_3_16_2_21_5.pb"));
        session1 = new TFSession(graph1);
        session2 = new TFSession(graph2);
        session3 = new TFSession(graph3);
        // RealSense: the depth stream must be configured before pipe.Start(cfg).
        pipe     = new Pipeline();
        cfg      = new Config();
        cfg.EnableStream(Intel.RealSense.Stream.Depth, 640, 360, Format.Z16, 30);
        pipe.Start(cfg);
        image              = new Mat();
        image2             = new Mat();
        // Webcam resolution matches the depth stream (640x360), presumably so the
        // two can be aligned pixel-for-pixel — verify in the processing code.
        camera             = new VideoCapture(0);
        camera.FrameWidth  = 640;
        camera.FrameHeight = 360;
    }
コード例 #12
0
        /// <summary>
        /// Reads frames from the default webcam, saves each one as a numbered PNG,
        /// and shows the live feed in an OpenCV window until ESC (key 27) is pressed.
        /// </summary>
        private void LoadVideoFrames()
        {
            if (openCvCapture == null)
            {
                openCvCapture = new OpenCvSharp.VideoCapture(0);
            }
            OpenCvSharp.Mat frame        = new OpenCvSharp.Mat();
            bool            readingVideo = true;
            int             cnt          = 0;

            while (readingVideo)
            {
                if (openCvCapture.Read(frame))
                {
                    cnt++;
                    // Fix: the original verbatim string lacked '$', so every frame
                    // was written to the literal file "image{cnt}.png" and overwritten.
                    frame.SaveImage($@"d:\junk\testCamImages\image{cnt}.png");
                    byte[]      imagearray = frame.ImEncode(".png");
                    BitmapImage bmi        = new BitmapImage();
                    using (MemoryStream ms = new MemoryStream(imagearray))
                    {
                        ms.Position = 0;
                        bmi.BeginInit();
                        bmi.CacheOption  = BitmapCacheOption.OnLoad;
                        bmi.StreamSource = ms;
                        bmi.EndInit();
                    }

                    //this.ImageSource.Dispatcher.BeginInvoke(System.Windows.Threading.DispatcherPriority.Normal,
                    //    new Action(() =>
                    //        {
                    //            ImageSource = bmi;
                    //        }));

                    OpenCvSharp.Cv2.ImShow("video", frame);
                    // WaitKey(27) waits up to 27 ms; 27 below is also the ESC key code.
                    int key = OpenCvSharp.Cv2.WaitKey(27);
                    if (key == 27)
                    {
                        readingVideo = false;
                    }
                }
            }
            // Fix: release the native frame buffer when the loop ends.
            frame.Dispose();
        }
コード例 #13
0
        /// <summary>
        /// Window constructor: grabs one frame from camera 0, runs Canny edge
        /// detection on it and shows the source and the result in two windows.
        /// Fix: the original did not compile — it contained the incomplete
        /// statements "cv.Cv2.." and "src.", and passed a FrameSource where
        /// cv.Window requires a Mat. Reconstructed to the evident demo intent.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            using (var capture = new cv.VideoCapture(0))
            using (var src = new cv.Mat())
            using (var dst = new cv.Mat())
            {
                // Grab one frame to use as the Canny input.
                capture.Read(src);

                cv.Cv2.Canny(src, dst, 50, 200);

                using (new cv.Window("src image", src))
                using (new cv.Window("dst image", dst))
                {
                    cv.Cv2.WaitKey();
                }
            }
        }
コード例 #14
0
        /// <summary>
        /// Opens the camera selected by Properties.CameraSourceId and allocates the
        /// frame buffer. Does nothing when a capture already exists or no camera is
        /// selected; shows a message box when the device cannot be opened.
        /// </summary>
        void StartCamera()
        {
            lock (m_sync)
            {
                System.Diagnostics.Debug.Assert(m_videoCapture == null);

                // Guard clauses: bail out when already running or nothing selected.
                if (m_videoCapture != null)
                {
                    return;
                }
                if (this.Properties.CameraSourceId < 0)
                {
                    return;
                }

                try
                {
                    m_videoCapture = new OpenCvSharp.VideoCapture(OpenCvSharp.CaptureDevice.Any, this.Properties.CameraSourceId);
                    m_image        = new Mat();
                }
                catch (Exception ex)
                {
                    MessageBox.Show("Cannot start camera.\nException:\n" + ex.ToString());
                }
            }
        }
コード例 #15
0
        /// <summary>
        /// Grabs frames from the camera (DirectShow), mirrors them around the
        /// vertical axis, shows them in the WPF Image control, and forwards each
        /// frame to ParseWebCamFrame until <paramref name="token"/> is cancelled.
        /// </summary>
        private async Task CaptureCamera(CancellationToken token)
        {
            if (capture == null)
            {
                capture = new OpenCvSharp.VideoCapture(CaptureDevice.DShow);
            }

            capture.Open(0);

            m_capture.Start();

            if (capture.IsOpened())
            {
                try
                {
                    while (!token.IsCancellationRequested)
                    {
                        // Fix: RetrieveMat() and Flip() each allocate a Mat — the
                        // original leaked two native buffers per frame.
                        using OpenCvSharp.Mat raw     = capture.RetrieveMat();
                        using OpenCvSharp.Mat flipped = raw.Flip(FlipMode.Y);
                        using MemoryStream memoryStream = flipped.ToMemoryStream();

                        await Application.Current.Dispatcher.InvokeAsync(() =>
                        {
                            var imageSource = new BitmapImage();

                            imageSource.BeginInit();
                            imageSource.CacheOption  = BitmapCacheOption.OnLoad;
                            imageSource.StreamSource = memoryStream;
                            imageSource.EndInit();

                            OpenCVSharpImageSource.Source = imageSource;
                        });

                        var bitmapImage = new Bitmap(memoryStream);

                        await ParseWebCamFrame(bitmapImage, token);
                    }
                }
                finally
                {
                    // Fix: guarantee the device is released even when a frame
                    // read or the UI update throws (the original skipped Release).
                    capture.Release();
                }
            }
        }
コード例 #16
0
        /// <summary>
        /// Opens camera 1, detects faces in every frame, saves each face crop as a
        /// JPEG under ./results/, and shows the live feed until any key is pressed.
        /// </summary>
        static void Main(string[] args)
        {
            //var vc = new OpenCvSharp.VideoCapture("./test/india.mp4");
            //var vc = new OpenCvSharp.VideoCapture("./test/Test.mov");
            //var vc = new OpenCvSharp.VideoCapture("./test/singleTest.m4v");
            //var vc = new OpenCvSharp.VideoCapture("./test/peopleTest.m4v");

            // Fix: dispose the capture, the per-frame Mat and each face crop —
            // the original leaked one Mat per frame and one per detected face.
            using (var vc = new OpenCvSharp.VideoCapture())
            {
                vc.Open(1); // camera index 1; use 0 for the default device

                ImageRecognizer imageRecognizer = new ImageRecognizer();
                int             key             = int.MinValue;

                using (Window window = new Window("capture"))
                {
                    while (key < 0)
                    {
                        vc.Grab();
                        using (var mat = vc.RetrieveMat())
                        {
                            if (mat.Empty())
                            {
                                return;
                            }

                            var faces = imageRecognizer.DetectFaces(mat);
                            if (faces != null)
                            {
                                foreach (var face in faces)
                                {
                                    // Crop via an ROI view and persist with a unique name.
                                    using (var faceCrop = new Mat(mat, face))
                                    {
                                        faceCrop.SaveImage($"./results/{Guid.NewGuid()}.jpg");
                                    }
                                }
                            }

                            window.ShowImage(mat);
                            key = Cv2.WaitKey(10);
                        }
                    }
                }
            }
        }
コード例 #17
0
        /// <summary>
        /// Plays an MP4 file in a window (ffmpeg backend is probably needed),
        /// pacing playback by the file's reported frame rate.
        /// </summary>
        public void Run()
        {
            // Opens MP4 file; fix: dispose the capture and frame buffer (leaked before).
            using (var capture = new VideoCapture(FilePath.Movie.Bach))
            {
                // Fix: some containers/backends report Fps as 0, which made the
                // division and cast below blow up — fall back to ~30 fps.
                double fps       = capture.Fps;
                int    sleepTime = fps > 0 ? (int)Math.Round(1000 / fps) : 33;

                using (var window = new Window("capture"))
                using (var image = new Mat()) // frame image buffer
                {
                    // When the movie playback reaches end, Mat.data becomes NULL.
                    while (true)
                    {
                        capture.Read(image); // same as cvQueryFrame
                        if (image.Empty())
                            break;

                        window.ShowImage(image);
                        Cv2.WaitKey(sleepTime);
                    }
                }
            }
        }
コード例 #18
0
ファイル: StatisticForm.cs プロジェクト: fel88/Dendrite
        private void button1_Click(object sender, EventArgs e)
        {
            if (th != null)
            {
                button1.Text = "start";
                stop         = true;
                th           = null;
                return;
            }
            button1.Text = "stop";
            th           = new Thread(() =>
            {
                cap          = new OpenCvSharp.VideoCapture(path);
                Stopwatch sw = Stopwatch.StartNew();
                var session1 = new InferenceSession(netPath);

                var inputMeta = session1.InputMetadata;


                Mat mat     = new Mat();
                var nFrames = cap.Get(VideoCaptureProperties.FrameCount);
                cap.Read(mat);
                var sz = mat.Size();
                if (inputDims[2] == -1)
                {
                    sz.Height = mat.Height;
                    sz.Width  = mat.Width;
                }
                string key = $"{sz.Width}x{sz.Height}";
                if (!Processing.allPriorBoxes.ContainsKey(key))
                {
                    var pd = Decoders.PriorBoxes2(sz.Width, sz.Height);;
                    Processing.allPriorBoxes.Add(key, pd);
                }
                var prior_data  = Processing.allPriorBoxes[key];
                var ofps        = cap.Get(VideoCaptureProperties.Fps);
                VideoWriter vid = null;
                if (checkBox1.Checked)
                {
                    vid = new VideoWriter("output.mp4", FourCC.XVID, ofps, mat.Size());
                }
                while (true)
                {
                    if (stop)
                    {
                        break;
                    }
                    var pf   = cap.Get(VideoCaptureProperties.PosFrames);
                    int perc = (int)Math.Round((pf / (float)nFrames) * 100);
                    progressBar1.Invoke(((Action)(() =>
                    {
                        label1.Text = $"{pf} / {nFrames}  {perc}%";
                        progressBar1.Value = perc;
                    })));
                    if (!cap.Read(mat))
                    {
                        break;
                    }
                    Mat orig = mat.Clone();
                    if (inputDims[2] == -1)
                    {
                        inputDims[2] = mat.Height;
                        inputDims[3] = mat.Width;
                    }

                    mat.ConvertTo(mat, MatType.CV_32F);
                    object param = mat;
                    foreach (var pitem in preps)
                    {
                        param = pitem.Process(param);
                    }

                    var inputData = param as float[];
                    var tensor    = new DenseTensor <float>(param as float[], inputDims);
                    var container = new List <NamedOnnxValue>();

                    container.Add(NamedOnnxValue.CreateFromTensor <float>(inputName, tensor));

                    float[] confd;
                    float[] locd;
                    using (var results = session1.Run(container))
                    {
                        var data = results.First().AsTensor <float>();
                        locd     = data.ToArray();
                        confd    = results.Skip(1).First().AsTensor <float>().ToArray();
                    }


                    Stopwatch sw2 = Stopwatch.StartNew();
                    var ret       = Processing.boxesDecode(orig.Size(), confd, locd, new System.Drawing.Size(sz.Width, sz.Height), prior_data, visTresh);
                    if (checkBox1.Checked)
                    {
                        var out1 = Processing.drawBoxes(orig, ret.Item1, ret.Item2, visTresh, ret.Item3);
                        vid.Write(out1);
                    }
                    sw2.Stop();
                }
                vid.Release();



                sw.Stop();
            });
            th.IsBackground = true;
            th.Start();
        }
コード例 #19
0
        /// <summary>
        /// ArcSoft face-engine demo: activates and initializes the engine, opens
        /// camera 0, and for every frame prints detection attributes, extracts a
        /// feature vector, self-compares it and scores image quality. Loops forever.
        /// </summary>
        static void Main(string[] args)
        {
            var faceengine = new FaceEngine(ASF_RegisterOrNot.ASF_REGISTER, 2);

            //faceengine.OnlineActivation("", "", "");
            FaceEngine.OnlineActivationFree("", "");
            //faceengine.OfflineActivation();

            faceengine.InitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_0_ONLY, 1, ASF_Mask.ASF_FACE_DETECT | ASF_Mask.ASF_FACERECOGNITION | ASF_Mask.ASF_AGE | ASF_Mask.ASF_LIVENESS);
            Console.WriteLine(faceengine.Version.BuildDate);
            Console.WriteLine(faceengine.Version.CopyRight);
            Console.WriteLine(faceengine.Version.Version);
            OpenCvSharp.VideoCapture videoCapture = new OpenCvSharp.VideoCapture();
            videoCapture.Open(0);

            var activeFile = FaceEngine.GetActiveFileInfo();

            ////Console.WriteLine(FaceEngine.GetActiveDeviceInfo());
            Stopwatch stopwatch = new Stopwatch();

            stopwatch.Restart();
            //faceengine.InitEngine(ASF_DetectMode.ASF_DETECT_MODE_IMAGE, ArcSoftFace_OrientPriority.ASF_OP_ALL_OUT, 9,
            //    ASF_Mask.ASF_AGE | ASF_Mask.ASF_FACE3DANGLE | ASF_Mask.ASF_FACELANDMARK | ASF_Mask.ASF_FACERECOGNITION | ASF_Mask.ASF_FACESHELTER | ASF_Mask.ASF_FACE_DETECT |
            //     ASF_Mask.ASF_GENDER | ASF_Mask.ASF_IMAGEQUALITY | ASF_Mask.ASF_IR_LIVENESS | ASF_Mask.ASF_LIVENESS | ASF_Mask.ASF_MASKDETECT | ASF_Mask.ASF_UPDATE_FACEDATA);
            Mat mat = new Mat();

            //Mat mat = new Mat(@"C:\Users\Jch\Desktop\2.jpg");
            while (true)
            {
                stopwatch.Restart();

                if (videoCapture.Read(mat))
                {
                    using (var img = mat.ToBitmap())
                        using (var imgInfo = ImageInfo.ReadBMP(img))
                        {
                            Console.WriteLine($"图片处理:{stopwatch.ElapsedMilliseconds}ms");
                            stopwatch.Restart();
                            var detectResult = faceengine.DetectFacesEx(imgInfo);
                            Console.WriteLine($"人脸定位:{stopwatch.ElapsedMilliseconds}ms");
                            if (detectResult != null)
                            {
                                foreach (var item in detectResult.FaceInfos)
                                {
                                    Console.WriteLine($"Age: {item.Age}");
                                    Console.WriteLine($"FaceID: {item.FaceID}");
                                    Console.WriteLine($"FaceOrient: {item.FaceOrient}");
                                    Console.WriteLine($"FaceShelter: {item.FaceShelter}");
                                    Console.WriteLine($"Gender: {item.Gender}");
                                    Console.WriteLine($"LeftEyeClosed: {item.LeftEyeClosed}");
                                    Console.WriteLine($"Liveness: {item.Liveness}");
                                    Console.WriteLine($"Mask: {item.Mask}");
                                    Console.WriteLine($"RightEyeClosed: {item.RightEyeClosed}");
                                    Console.WriteLine($"WearGlasses: {item.WearGlasses}");
                                    Console.WriteLine($"FaceRect: bottom->{item.FaceRect.bottom} left->{item.FaceRect.left} right->{item.FaceRect.right} top->{item.FaceRect.top}");
                                    // Fix: the original printed FaceLandmark.x twice ("y->{...x}").
                                    Console.WriteLine($"FaceLandmark: x->{item.FaceLandmark.x} y->{item.FaceLandmark.y}");
                                    Console.WriteLine($"Face3DAngle: {item.Face3DAngle.roll} {item.Face3DAngle.yaw} {item.Face3DAngle.pitch} {item.Face3DAngle.status}");
                                    stopwatch.Restart();
                                    var feature = faceengine.FaceFeatureExtractEx(imgInfo, item);
                                    Console.WriteLine($"提取特征值: {stopwatch.ElapsedMilliseconds}ms");
                                    if (feature != null)
                                    {
                                        Console.WriteLine($"feature: {feature.Size}");
                                        // Fix: this call dereferenced feature outside the null
                                        // check and crashed whenever extraction failed.
                                        Console.WriteLine(faceengine.FaceFeatureCompare(feature.ASFFaceFeature, feature.ASFFaceFeature));
                                    }
                                    var score = faceengine.ImageQualityDetectEx(imgInfo, item);
                                    Console.WriteLine($"人脸质量: {score}");
                                    Console.WriteLine("--------------------------------------------");
                                }
                            }
                        }
                }
            }

            Console.ReadLine();
        }
コード例 #20
0
        /// <summary>
        /// Starts a background thread that grabs frames from camera 0, runs face and
        /// eye cascade detection on each frame, overlays eye images via DrawImg, and
        /// publishes the result to OutImg on the UI dispatcher. The loop ends when
        /// <c>released</c> is set.
        /// </summary>
        private void Init()
        {
            new Thread(() =>
            {
                var eye_casc       = new cv.CascadeClassifier("eye.xml");
                // NOTE(review): the left/right-eye cascades are loaded but never used —
                // detection below relies on the generic eye cascade only. Confirm whether
                // they can be removed.
                var left_eye_casc  = new cv.CascadeClassifier("left_eye.xml");
                var right_eye_casc = new cv.CascadeClassifier("right_eye.xml");
                var face_casc      = new cv.CascadeClassifier("fface_default.xml");

                cap = new cv.VideoCapture(0);

                while (!released)
                {
                    var frame = new cv.Mat();
                    cap.Read(frame);
                    var gray = frame.CvtColor(cv.ColorConversionCodes.BGR2GRAY);
                    // The adaptive-thresholded image is what gets shown to the user.
                    var img  = gray.AdaptiveThreshold(255, cv.AdaptiveThresholdTypes.GaussianC, cv.ThresholdTypes.Binary, 115, 1);
                    var faces = face_casc.DetectMultiScale(gray, 1.3, 5);
                    RenderTargetBitmap eyes_lay = null;

                    foreach (var face in faces)
                    {
                        var rect    = new cv.Rect(face.Location, face.Size);
                        var sub_ing = gray[rect];

                        // Detect eyes inside the face region; only the first two hits
                        // are considered in each pass below.
                        var eyes  = eye_casc.DetectMultiScale(sub_ing, 1.3, 2);
                        int count = 0;

                        // left eye: box lies entirely in the left half of the face
                        foreach (var eye in eyes)
                        {
                            count++;
                            if (count > 2)
                            {
                                count = 0;
                                break;
                            }

                            if (eye.X + eye.Width < face.Width / 2)
                            {
                                Dispatcher.Invoke(() =>
                                {
                                    eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img), eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_l, scale_w, scale_h);
                                });
                            }
                        }

                        // right eye: box lies in the right half of the face
                        // (fixed: this pass was mislabeled "left eye")
                        count = 0;
                        foreach (var eye in eyes)
                        {
                            count++;
                            if (count > 2)
                            {
                                break;
                            }

                            if (eye.X + eye.Width > face.Width / 2)
                            {
                                Dispatcher.Invoke(() =>
                                {
                                    if (eyes_lay != null)
                                    {
                                        // Draw on top of the left-eye overlay.
                                        eyes_lay = DrawImg(eyes_lay, eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_r, scale_w, scale_h);
                                    }
                                    else
                                    {
                                        eyes_lay = DrawImg(cv.Extensions.BitmapSourceConverter.ToBitmapSource(img), eye.X + face.X, eye.Y + face.Y, eye.Width, eye.Height, eye_r, scale_w, scale_h);
                                    }
                                });
                            }
                        }

                        sub_ing.Dispose();
                    }

                    Dispatcher.Invoke(() =>
                    {
                        if (eyes_lay != null)
                        {
                            OutImg.Source = eyes_lay;
                        }
                        else
                        {
                            OutImg.Source = cv.Extensions.BitmapSourceConverter.ToBitmapSource(img);
                        }
                    });

                    // Dispatcher.Invoke is synchronous and ToBitmapSource copies pixel
                    // data, so the per-frame Mats can be released here. Explicit
                    // disposal replaces the previous GC.Collect() band-aid.
                    img.Dispose();
                    gray.Dispose();
                    frame.Dispose();
                }

                face_casc.Dispose();
                right_eye_casc.Dispose();
                left_eye_casc.Dispose();
                eye_casc.Dispose();
            })
            {
                IsBackground = true
            }.Start();
        }
コード例 #21
0
        /// <summary>
        /// Captures one frame from the web camera, undistorts it with calibration
        /// parameters loaded from the yml file referenced by
        /// <paramref name="obj"/>.CalibratinFilePath, extracts the HSV range given by
        /// the settings, and returns the annotated image plus the largest blob.
        /// </summary>
        /// <param name="obj">HSV thresholds and calibration-file path.</param>
        /// <returns>
        /// The processing result, or null when no frame could be read, no blob was
        /// found, or no contour was found.
        /// </returns>
        public ImageProcessValue Execute(SettingsObj obj)
        {
            // Web camera capture at the requested resolution.
            using (var camera = new OpenCvSharp.VideoCapture(0)
            {
                FrameWidth  = 1920,
                FrameHeight = 1080
            })
            // Read the camera-intrinsics / distortion matrices from the yml file.
            using (var fs = new FileStorage(obj.CalibratinFilePath, FileStorage.Mode.Read))
            using (var mtx = fs["mtx"].ReadMat())
            using (var dist = fs["dist"].ReadMat())
            using (var src = new Mat())
            {
                // Grab one frame.
                camera.Read(src);
                if (src.Empty())
                {
                    return(null);
                }

                using (var calib = new Mat())
                using (var tmp = new Mat())
                using (var hsv = new Mat())
                using (var msk = new Mat())
                using (var msk_src = new Mat())
                using (var show_msk = new Mat())
                using (var gray = new Mat())
                using (var th = new Mat())
                using (var filtered = new Mat())
                {
                    // Lens-distortion correction.
                    Cv2.Undistort(src, calib, mtx, dist);

                    // Convert to OpenCV channel order, then to HSV.
                    Cv2.CvtColor(calib, tmp, OpenCvSharp.ColorConversionCodes.RGB2BGR);
                    Cv2.CvtColor(tmp, hsv, OpenCvSharp.ColorConversionCodes.BGR2HSV);

                    // Range-threshold in HSV — the binary result is used as a mask.
                    Cv2.InRange(hsv, new Scalar(obj.HueMin, obj.SaturationMin, obj.ValueMin), new Scalar(obj.HueMax, obj.SaturationMax, obj.ValueMax), msk);

                    // Keep only the masked pixels, then convert back to BGR for display.
                    Cv2.BitwiseAnd(hsv, hsv, msk_src, msk);
                    Cv2.CvtColor(msk_src, show_msk, ColorConversionCodes.HSV2BGR);

                    // Grayscale + Otsu binarization for blob/contour extraction.
                    Cv2.CvtColor(show_msk, gray, ColorConversionCodes.BGR2GRAY);
                    Cv2.Threshold(gray, th, 130, 255, ThresholdTypes.Otsu);

                    // Connected-component labeling; label 0 is the background.
                    ConnectedComponents cc = Cv2.ConnectedComponentsEx(th);
                    if (cc.LabelCount <= 1)
                    {
                        return(null);
                    }

                    // Draw bounding boxes for every blob except the background.
                    foreach (var blob in cc.Blobs.Skip(1))
                    {
                        show_msk.Rectangle(blob.Rect, Scalar.Red);
                    }

                    // Isolate the largest blob.
                    var maxBlob = cc.GetLargestBlob();
                    cc.FilterByBlob(show_msk, filtered, maxBlob);

                    // Contour search on the binary mask; bail out when none found.
                    Point[][]        contours;
                    HierarchyIndex[] hierarchy;
                    Cv2.FindContours(th, out contours, out hierarchy, RetrievalModes.List, ContourApproximationModes.ApproxNone);
                    if (contours.Length == 0)
                    {
                        return(null);
                    }

                    Cv2.DrawContours(show_msk, contours, -1, Scalar.Yellow, 3);

                    // Package the annotated image and the dominant blob for the caller.
                    var val = new ImageProcessValue();
                    val.CameraImage = OpenCvSharp.Extensions.BitmapConverter.ToBitmap(show_msk);
                    val.Blob        = maxBlob;

                    return(val);
                }
            }
        }
コード例 #22
-1
        /// <summary>
        /// Plays back the movie file on the background worker, reporting each decoded
        /// frame through ReportProgress at the movie's native frame rate.
        /// </summary>
        /// <param name="fileName">Path of the movie file to open.</param>
        /// <exception cref="InvalidOperationException">The capture thread is already running.</exception>
        /// <exception cref="ArgumentNullException">fileName is null or whitespace.</exception>
        public void Start(string fileName)
        {
            if (captureWorker.IsBusy)
                throw new InvalidOperationException("すでに Capture スレッドが実行中です。");

            if (string.IsNullOrWhiteSpace(fileName))
                throw new ArgumentNullException("fileName");

            FileName = fileName;

            // NOTE(review): += appends a new handler every time Start is called on the
            // same worker instance — consider wiring DoWork only once.
            captureWorker.DoWork += (sender, e) =>
            {
                var bw = (BackgroundWorker)sender;
                using (var capture = new VideoCapture(FileName))
                using (var image = new Mat())
                {
                    // Guard against containers that report no frame rate: fall back to
                    // ~30 fps instead of computing a division by zero.
                    double fps = capture.Fps;
                    int interval = fps > 0 ? (int)(1000 / fps) : 33;
                    while (true)
                    {
                        if (bw.CancellationPending)
                        {
                            e.Cancel = true;
                            return;
                        }

                        // Read() returns false and leaves an empty Mat at end of
                        // stream; the previous `image == null` check could never fire.
                        if (!capture.Read(image) || image.Empty()) // 動画終了
                            return;

                        bw.ReportProgress(0, image);
                        Thread.Sleep(interval);
                    }
                }
            };
            captureWorker.RunWorkerAsync();
        }
コード例 #23
-1
        /// <summary>
        /// Plays back the movie file on the background worker at a caller-supplied
        /// frame interval, reporting each decoded frame through ReportProgress.
        /// </summary>
        /// <param name="fileName">Path of the movie file to open.</param>
        /// <param name="interval">Delay in milliseconds between reported frames.</param>
        /// <exception cref="InvalidOperationException">The capture thread is already running.</exception>
        /// <exception cref="ArgumentNullException">fileName is null or whitespace.</exception>
        public void Start(string fileName, int interval)
        {
            if (captureWorker.IsBusy)
                throw new InvalidOperationException("すでに Capture スレッドが実行中です。"); //{ Data = { { "GetValue.Arguments.fileName", fileName } } };

            // Validate the path like the single-argument overload does.
            if (string.IsNullOrWhiteSpace(fileName))
                throw new ArgumentNullException("fileName");

            FileName = fileName;

            // NOTE(review): += appends a new handler every time Start is called on the
            // same worker instance — consider wiring DoWork only once.
            captureWorker.DoWork += (sender, e) =>
            {
                var bw = (BackgroundWorker)sender;
                using (var capture = new VideoCapture(FileName))
                using (var image = new Mat())
                {
                    while (true)
                    {
                        if (bw.CancellationPending)
                        {
                            e.Cancel = true;
                            return;
                        }

                        // Read() returns false and leaves an empty Mat at end of
                        // stream; the previous `image == null` check could never fire.
                        if (!capture.Read(image) || image.Empty()) // 動画終了
                            return;

                        bw.ReportProgress(0, image);
                        Thread.Sleep(interval);
                    }
                }
            };
            captureWorker.RunWorkerAsync();
        }
コード例 #24
-1
        /// <summary>
        /// Starts continuous capture from the given camera on the background worker,
        /// reporting a frame through ReportProgress every
        /// <paramref name="interval"/> milliseconds. Does nothing when the worker is
        /// already running.
        /// </summary>
        /// <param name="cameraNumber">Device index passed to VideoCapture.</param>
        /// <param name="interval">Delay in milliseconds between reported frames.</param>
        public void Start(int cameraNumber, int interval = 1000)
        {
            if (captureWorker.IsBusy)
                return;

            CameraNumber = cameraNumber;

            // NOTE(review): += appends a new handler every time Start is called on the
            // same worker instance — consider wiring DoWork only once.
            captureWorker.DoWork += (sender, e) =>
            {
                var bw = (BackgroundWorker)sender;
                using (var capture = new VideoCapture(CameraNumber))
                using (var image = new Mat())
                {
                    // NOTE(review): 640 looks like a width value — confirm the
                    // intended capture resolution.
                    capture.FrameHeight = 640;
                    while (true)
                    {
                        if (bw.CancellationPending)
                        {
                            e.Cancel = true;
                            return;
                        }

                        // Read() returns false / an empty Mat when the camera yields
                        // no frame; the previous `image == null` check could never
                        // detect that, so this exception was unreachable.
                        if (!capture.Read(image) || image.Empty())
                            throw new Exception("カメラから画像が読み取れませんでした。");

                        bw.ReportProgress(0, image);
                        Thread.Sleep(interval);
                    }
                }
            };
            captureWorker.RunWorkerAsync();
        }
コード例 #25
-2
        /// <summary>
        /// Opens every video listed in m_textbox, shows each file's frame rate, wraps
        /// the captures in VideoPacking instances, and hands everything to a
        /// VideoMetaGenerator for processing.
        /// </summary>
        private void button2_Click(object sender, EventArgs e)
        {
            m_videoList = new List<VideoCapture>();
            List<VideoPacking> packings = new List<VideoPacking>();

            // Phase 1: open each source and report its frame rate.
            foreach (string path in m_textbox)
            {
                VideoCapture capture = new VideoCapture(path);
                MessageBox.Show(capture.Fps.ToString());
                m_videoList.Add(capture);
            }

            // Phase 2: pair each capture with its path and index.
            for (int index = 0; index < m_videoList.Count; index++)
            {
                packings.Add(new VideoPacking(m_textbox[index], index, m_videoList[index]));
            }

            VideoMetaGenerator generator = new VideoMetaGenerator(m_videoList, m_textbox, packings);
            generator.Worker();
        }
コード例 #26
-4
 /// <summary>
 /// Background-subtraction demo: shows each source frame next to the foreground
 /// mask produced by a MOG subtractor until the movie ends.
 /// </summary>
 public void Run()
 {
     using (var capture = new VideoCapture(FilePath.Movie.Bach))
     using (var mog = BackgroundSubtractorMOG.Create())
     using (var windowSrc = new Window("src"))
     using (var windowDst = new Window("dst"))
     {
         var source = new Mat();
         var foreground = new Mat();
         for (; ; )
         {
             capture.Read(source);
             if (source.Empty())
             {
                 break;
             }

             // Update the background model and extract the foreground mask.
             mog.Run(source, foreground, 0.01);

             windowSrc.Image = source;
             windowDst.Image = foreground;
             Cv2.WaitKey(50);
         }
     }
 }
コード例 #27
-12
        /// <summary>
        /// Reads every frame of the source movie, converts it to a 640x480 Canny edge
        /// image written to "out.avi", then plays the converted movie back in a window.
        /// </summary>
        public void Run()
        {
            const string OutVideoFile = "out.avi";
            Size dsize = new Size(640, 480);

            // Cached so the playback section can compute its delay after the first
            // capture has been disposed (it previously leaked).
            double fps;

            // Opens MP4 file (ffmpeg is probably needed)
            using (VideoCapture capture = new VideoCapture(FilePath.Movie.Bach))
            {
                fps = capture.Fps;

                // Read movie frames and write them to VideoWriter
                // (fourcc -1 lets the OS choose a codec, possibly via a dialog).
                using (VideoWriter writer = new VideoWriter(OutVideoFile, -1, fps, dsize))
                using (Mat frame = new Mat())
                {
                    Console.WriteLine("Converting each movie frames...");
                    while (true)
                    {
                        // Read image
                        capture.Read(frame);
                        if (frame.Empty())
                            break;

                        Console.CursorLeft = 0;
                        Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);

                        // grayscale -> canny -> resize; dispose the intermediates
                        // instead of leaking one set of Mats per frame.
                        using (Mat gray = new Mat())
                        using (Mat canny = new Mat())
                        using (Mat dst = new Mat())
                        {
                            Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                            Cv2.Canny(gray, canny, 100, 180);
                            Cv2.Resize(canny, dst, dsize, 0, 0, InterpolationFlags.Linear);
                            // Write mat to VideoWriter
                            writer.Write(dst);
                        }
                    }
                    Console.WriteLine();
                }
            }

            // Watch result movie
            using (VideoCapture capture2 = new VideoCapture(OutVideoFile))
            using (Window window = new Window("result"))
            using (Mat frame = new Mat())
            {
                int sleepTime = (int)(1000 / fps);

                while (true)
                {
                    capture2.Read(frame);
                    if (frame.Empty())
                        break;

                    window.ShowImage(frame);
                    Cv2.WaitKey(sleepTime);
                }
            }
        }