Wrapper of HighGUI window
Inheritance: DisposableObject
Example #1
6
        public void Run()
        {
            // Shows raw camera frames next to BTV-L1 super-resolution output.
            // FIX: the VideoCapture was never disposed; 'using var' releases the
            // native camera handle when the method exits.
            // NOTE(review): the frame size is set before Open(); some capture
            // backends ignore Set() on a closed device — confirm this ordering
            // is intended.
            using var capture = new VideoCapture();
            capture.Set(CaptureProperty.FrameWidth, 640);
            capture.Set(CaptureProperty.FrameHeight, 480);
            capture.Open(-1);
            if (!capture.IsOpened())
                throw new InvalidOperationException("capture initialization failed");

            var fs = FrameSource.CreateCameraSource(-1);
            var sr = SuperResolution.CreateBTVL1();
            sr.SetInput(fs);

            using (var normalWindow = new Window("normal"))
            using (var srWindow = new Window("super resolution"))
            using (var normalFrame = new Mat())
            using (var srFrame = new Mat())
            {
                while (true)
                {
                    capture.Read(normalFrame);
                    sr.NextFrame(srFrame);
                    // Stop as soon as either source runs out of frames.
                    if (normalFrame.Empty() || srFrame.Empty())
                        break;
                    normalWindow.ShowImage(normalFrame);
                    srWindow.ShowImage(srFrame);
                    Cv2.WaitKey(100);
                }
            }
        }
        public MainWindow()
        {
            InitializeComponent();

            // Grab the default camera through the DirectShow backend; if no
            // device can be opened there is nothing to preview, so bail out.
            using var videoCapture = new VideoCapture(0, VideoCaptureAPIs.DSHOW);
            if (!videoCapture.IsOpened())
            {
                return;
            }

            videoCapture.FrameWidth  = 1920;
            videoCapture.FrameHeight = 1280;
            videoCapture.AutoFocus   = true;

            const int sleepTime = 10;

            using var window = new OpenCvSharp.Window("capture");
            var frame = new Mat();

            // Pump frames until the stream ends or the user presses any key.
            for (;;)
            {
                videoCapture.Read(frame);
                if (frame.Empty())
                {
                    break;
                }

                window.ShowImage(frame);
                int pressedKey = Cv2.WaitKey(sleepTime);
                if (pressedKey >= 0)
                {
                    break;
                }
            }
        }
Example #3
0
    /// <summary>
    /// Periodically captures a webcam frame, writes it to a temporary file and
    /// uploads it, until cancellation is requested via
    /// <paramref name="cancelTokenSource"/>. Returns the task running the loop.
    /// </summary>
    public Task StartTakePicture(int intervalMSec, CancellationTokenSource cancelTokenSource)
    {
        if (string.IsNullOrEmpty(photoFileNamePrefix))
        {
            // Prefix comes from the value of the second "key=value" pair of
            // the connection string.
            var css    = ConnectionString.Split(";");
            var devdef = css[1].Split("=");
            photoFileNamePrefix = devdef[1];
        }
        // NOTE(review): lock(this) is an anti-pattern (external code can lock
        // the same instance); kept as-is because the matching read below and
        // possibly other members rely on this lock object.
        lock (this) {
            takePhotoIntervalMSec = intervalMSec;
        }
        var tmpFileName = photoFileName + photoFileNameExt;
        // FIX: Task.Factory.StartNew with an async lambda produced a
        // Task<Task>, so the returned task completed almost immediately and
        // callers could not observe the capture loop finishing or faulting.
        // Task.Run unwraps the inner task automatically.
        var photoTakingTask = Task.Run(async () => {
            // FIX: the VideoCapture was never disposed; release the native
            // camera handle when the loop ends.
            using (var capture = OpenCvSharp.VideoCapture.FromCamera(0))
            using (var win = new OpenCvSharp.Window())
                using (var mat = new OpenCvSharp.Mat()) {
                    while (true)
                    {
                        capture.Read(mat);
                        win.ShowImage(mat);
                        var now = DateTime.Now;
                        // Recreate the temp file for every shot (CreateNew
                        // below throws if it already exists).
                        if (File.Exists(tmpFileName))
                        {
                            File.Delete(tmpFileName);
                        }
                        using (var fs = new FileStream(tmpFileName, FileMode.CreateNew)) {
                            mat.WriteToStream(fs, photoFileNameExt, null);
                        }
                        var fileName = photoFileNamePrefix + now.ToString("yyyyMMddHHmmss") + photoFileNameExt;
                        await UploadFile(fileName, new FileInfo(tmpFileName).FullName);
                        // The interval may be changed concurrently; read it
                        // under the same lock that writers use.
                        var interval = 0;
                        lock (this) {
                            interval = takePhotoIntervalMSec;
                        }
                        await Task.Delay(interval);
                        if (cancelTokenSource.IsCancellationRequested)
                        {
                            break;
                        }
                    }
                    // Surface the cancellation so awaiters see a canceled task.
                    throw new OperationCanceledException(cancelTokenSource.Token);
                }
        }, cancelTokenSource.Token);

        return(photoTakingTask);
    }
 private void Button_Click(object sender, RoutedEventArgs e)
 {
     // Stream 640x360 frames from the default camera into a HighGUI window
     // until the user presses any key.
     using (var capture = new VideoCapture(0))
     using (var previewWindow = new OpenCvSharp.Window("capture"))
     using (var frame = new Mat())
     {
         capture.Set(CaptureProperty.FrameWidth, 640);
         capture.Set(CaptureProperty.FrameHeight, 360);

         for (;;)
         {
             capture.Read(frame);
             previewWindow.ShowImage(frame);
             if (Cv2.WaitKey(30) >= 0)
             {
                 break;
             }
         }
     }
 }
Example #5
0
        public void Run()
        {
            // People detection with the default HOG + linear-SVM people
            // detector, drawing each (shrunken) hit rectangle on the image.
            // FIX: img and hog hold native resources and were never disposed.
            using var img = Cv2.ImRead(FilePath.Image.Asahiyama, ImreadModes.Color);

            using var hog = new HOGDescriptor();
            hog.SetSVMDetector(HOGDescriptor.GetDefaultPeopleDetector());

            bool b = hog.CheckDetectorSize();
            Console.WriteLine("CheckDetectorSize: {0}", b);

            var watch = Stopwatch.StartNew();

            // run the detector with default parameters. to get a higher hit-rate
            // (and more false alarms, respectively), decrease the hitThreshold and
            // groupThreshold (set groupThreshold to 0 to turn off the grouping completely).
            Rect[] found = hog.DetectMultiScale(img, 0, new Size(8, 8), new Size(24, 16), 1.05, 2);

            watch.Stop();
            Console.WriteLine("Detection time = {0}ms", watch.ElapsedMilliseconds);
            Console.WriteLine("{0} region(s) found", found.Length);

            foreach (Rect rect in found)
            {
                // the HOG detector returns slightly larger rectangles than the real objects.
                // so we slightly shrink the rectangles to get a nicer output.
                var r = new Rect
                {
                    X = rect.X + (int)Math.Round(rect.Width * 0.1),
                    Y = rect.Y + (int)Math.Round(rect.Height * 0.1),
                    Width = (int)Math.Round(rect.Width * 0.8),
                    Height = (int)Math.Round(rect.Height * 0.8)
                };
                img.Rectangle(r.TopLeft, r.BottomRight, Scalar.Red, 3);
            }

            using (var window = new Window("people detector", WindowMode.Normal, img))
            {
                window.SetProperty(WindowProperty.Fullscreen, 1);
                Cv2.WaitKey(0);
            }
        }
Example #6
0
        public MainWindowViewModel()
        {
            // Creates the OpenCV preview window and registers a mouse callback
            // that collects image points (see CvMouseMove).
            mWin = new OpenCvSharp.Window("Source Window");
            CvMouseCallback callback = new CvMouseCallback(CvMouseMove);

            // NOTE(review): 'callback' is a local; after this constructor
            // returns the GC may collect the delegate while native OpenCV code
            // still holds the function pointer (see the GC.KeepAlive pattern
            // used elsewhere in this file). Consider storing it in a field —
            // TODO confirm.
            Cv2.SetMouseCallback("Source Window", callback);

            mImagePoints = new List <Point2f>();
            //Initialize source points.
            //Top left corner of paper list taken as the origin of the coordinate system.
            //Bypassing the points is clockwise.
            // The world points describe an A4 sheet (210 x 297 mm) lying in
            // the Z = 0 plane, corners listed clockwise from the origin.
            mWorldPoints = new List <Point3f>()
            {
                new Point3f(0, 0, 0),
                new Point3f(210, 0, 0),
                new Point3f(210, 297, 0),
                new Point3f(0, 297, 0)
            };

            ResultIsUnknown();
        }
Example #7
0
        /// <summary>
        /// Opens a full-screen cropping window with mouse and zoom-trackbar
        /// callbacks, waits for a key press, and returns a copy of the image.
        /// </summary>
        public Mat Crop()
        {
            using (CroppingWindow = new OpenCvSharp.Window("Cropper", WindowMode.FullScreen, srcImage))
            {
                CvMouseCallback onMouse = new CvMouseCallback(mouseCallback);
                CroppingWindow.SetMouseCallback(onMouse);
                CvTrackbarCallback2 onZoom = new CvTrackbarCallback2(trackbarCallback);
                CvTrackbar          zoom   = CroppingWindow.CreateTrackbar2("Zoom", 100, 200, onZoom, null);
                Cv2.WaitKey();

                // These lines are very important.
                // OpenCV is written in C++, which means it's unmanaged.
                // When a delegate is passed to it as a function pointer, the GC
                // can no longer see that native code still uses the delegate,
                // so it is our responsibility to keep it alive while in use;
                // otherwise an exception will be thrown.
                GC.KeepAlive(onMouse);
                // FIX: the trackbar callback has exactly the same lifetime
                // hazard as the mouse callback but was not kept alive.
                GC.KeepAlive(onZoom);
            }
            // seems that srcImage will be released by GC, so I must return a copy of it
            return(srcImage.Clone());
        }
Example #8
0
        public void Run()
        {
            // Opens MP4 file (ffmpeg is probably needed).
            // FIX: the capture was never disposed; 'using var' releases it.
            using var capture = new VideoCapture(FilePath.Movie.Bach);

            // Per-frame delay from the movie's FPS.
            // FIX: guard against Fps reporting 0 (some backends do), which
            // would previously divide by zero; fall back to ~30 fps.
            int sleepTime = capture.Fps > 0
                ? (int)Math.Round(1000 / capture.Fps)
                : 33;

            using (var window = new Window("capture"))
            using (var image = new Mat())
            {
                // When playback reaches the end, Read() yields an empty Mat.
                while (true)
                {
                    capture.Read(image); // same as cvQueryFrame
                    if (image.Empty())
                        break;

                    window.ShowImage(image);
                    Cv2.WaitKey(sleepTime);
                }
            }
        }
Example #9
0
    /// <summary>
    /// Entry point of the video face-recognition demo: parses command-line
    /// options, opens the video sources, builds the FaceRec pipeline
    /// (service, recognizer, database, VideoWorker) and draws recognition
    /// results in one window per source until Escape is pressed.
    /// Returns 0 on success, 1 on argument or runtime error.
    /// </summary>
    static int Main(string[] args)
    {
        try
        {
            // print usage
            Console.WriteLine("Usage: dotnet csharp_video_recognition_demo.dll [OPTIONS] <video_source>...");
            Console.WriteLine("Examples:");
            Console.WriteLine("    Webcam:  dotnet csharp_video_recognition_demo.dll --config_dir ../../../conf/facerec 0");
            Console.WriteLine("    RTSP stream:  dotnet csharp_video_recognition_demo.dll --config_dir ../../../conf/facerec rtsp://localhost:8554/");
            Console.WriteLine("");

            // parse arguments
            bool    error   = false;
            Options options = new Options();
            CommandLine.Parser.Default.ParseArguments <Options>(args)
            .WithParsed <Options>(opts => options  = opts)
            .WithNotParsed <Options>(errs => error = true);

            // exit if argument parsing error
            if (error)
            {
                return(1);
            }

            // print values of arguments (reflection so new options are
            // listed automatically; video_sources is a list, printed inline)
            Console.WriteLine("Arguments:");
            foreach (var opt in options.GetType().GetProperties())
            {
                if (opt.Name == "video_sources")
                {
                    Console.Write("video sources = ");
                    foreach (string vs in options.video_sources)
                    {
                        Console.Write(vs + " ");
                    }
                    Console.WriteLine();
                }
                else
                {
                    Console.WriteLine("--{0} = {1}", opt.Name, opt.GetValue(options, null));
                }
            }
            Console.WriteLine("\n");

            //parameters parse
            string        config_dir    = options.config_dir;
            string        license_dir   = options.license_dir;
            string        database_dir  = options.database_dir;
            string        method_config = options.method_config;
            float         recognition_distance_threshold = options.recognition_distance_threshold;
            float         frame_fps_limit = options.frame_fps_limit;
            List <string> video_sources   = new List <string>(options.video_sources);

            // check params
            MAssert.Check(config_dir != string.Empty, "Error! config_dir is empty.");
            MAssert.Check(database_dir != string.Empty, "Error! database_dir is empty.");
            MAssert.Check(method_config != string.Empty, "Error! method_config is empty.");
            MAssert.Check(recognition_distance_threshold > 0, "Error! Failed recognition distance threshold.");

            List <ImageAndDepthSource> sources = new List <ImageAndDepthSource>();
            List <string> sources_names        = new List <string>();


            MAssert.Check(video_sources.Count > 0, "Error! video_sources is empty.");

            // wrap every CLI source (webcam index or URL) in an OpenCV source
            for (int i = 0; i < video_sources.Count; i++)
            {
                sources_names.Add(string.Format("OpenCvS source {0}", i));
                sources.Add(new OpencvSource(video_sources[i]));
            }


            MAssert.Check(sources_names.Count == sources.Count);

            // print sources
            Console.WriteLine("\n{0} sources: ", sources.Count);

            for (int i = 0; i < sources_names.Count; ++i)
            {
                Console.WriteLine("  {0}", sources_names[i]);
            }
            Console.WriteLine("");

            // create facerec service
            FacerecService service =
                FacerecService.createService(
                    config_dir,
                    license_dir);

            Console.WriteLine("Library version: {0}\n", service.getVersion());

            // create database
            Recognizer recognizer = service.createRecognizer(method_config, true, false, false);
            Capturer   capturer   = service.createCapturer("common_capturer4_lbf_singleface.xml");
            Database   database   = new Database(
                database_dir,
                recognizer,
                capturer,
                recognition_distance_threshold);
            // the Database keeps what it needs; these handles can go now
            recognizer.Dispose();
            capturer.Dispose();

            FacerecService.Config vw_config = new FacerecService.Config("video_worker_fdatracker_blf_fda.xml");
            // vw_config.overrideParameter("single_match_mode", 1);
            vw_config.overrideParameter("search_k", 10);
            vw_config.overrideParameter("not_found_match_found_callback", 1);
            vw_config.overrideParameter("downscale_rawsamples_to_preferred_size", 0);

            //ActiveLiveness.CheckType[] checks = new ActiveLiveness.CheckType[3]
            //{
            //	ActiveLiveness.CheckType.BLINK,
            //			ActiveLiveness.CheckType.TURN_RIGHT,
            //			ActiveLiveness.CheckType.SMILE
            //};


            // create one VideoWorker shared by all streams (one processing /
            // matching thread per source)
            VideoWorker video_worker =
                service.createVideoWorker(
                    new VideoWorker.Params()
                    .video_worker_config(vw_config)
                    .recognizer_ini_file(method_config)
                    .streams_count(sources.Count)
                    //.age_gender_estimation_threads_count(sources.Count)
                    //.emotions_estimation_threads_count(sources.Count)
                    //.active_liveness_checks_order(checks)
                    .processing_threads_count(sources.Count)
                    .matching_threads_count(sources.Count));

            // set database
            video_worker.setDatabase(database.vwElements, Recognizer.SearchAccelerationType.SEARCH_ACCELERATION_1);

            // create one named HighGUI window per source, primed with a black
            // placeholder image
            for (int i = 0; i < sources_names.Count; ++i)
            {
                // NOTE(review): the Window instance is not stored; presumably
                // ImShow addresses the window by name afterwards — confirm.
                OpenCvSharp.Window window = new OpenCvSharp.Window(sources_names[i]);

                OpenCvSharp.Cv2.ImShow(sources_names[i], new OpenCvSharp.Mat(100, 100, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0)));
            }

            // prepare buffers to store drawn results (shared with workers,
            // hence the mutex)
            Mutex draw_images_mutex            = new Mutex();
            List <OpenCvSharp.Mat> draw_images = new List <OpenCvSharp.Mat>(sources.Count);


            // create one worker per one source
            List <Worker> workers = new List <Worker>();

            for (int i = 0; i < sources.Count; ++i)
            {
                draw_images.Add(new OpenCvSharp.Mat(100, 100, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0)));
                workers.Add(new Worker(
                                database,
                                video_worker,
                                sources[i],
                                i,          // stream_id
                                draw_images_mutex,
                                draw_images[i],
                                frame_fps_limit
                                ));
            }


            // draw results until escape pressed
            for (;;)
            {
                {
                    // under the mutex, show each worker's latest frame and
                    // reset the shared buffer slot
                    draw_images_mutex.WaitOne();
                    for (int i = 0; i < draw_images.Count; ++i)
                    {
                        OpenCvSharp.Mat drawed_im = workers[i]._draw_image;
                        if (!drawed_im.Empty())
                        {
                            OpenCvSharp.Cv2.ImShow(sources_names[i], drawed_im);
                            draw_images[i] = new OpenCvSharp.Mat();
                        }
                    }
                    draw_images_mutex.ReleaseMutex();
                }

                // Esc (27) quits, Space enables processing on stream 0,
                // Enter (13) disables it, 'r' resets the tracker on stream 0
                int key = OpenCvSharp.Cv2.WaitKey(20);
                if (27 == key)
                {
                    foreach (Worker w in workers)
                    {
                        w.Dispose();
                    }
                    break;
                }

                if (' ' == key)
                {
                    Console.WriteLine("enable processing 0");
                    video_worker.enableProcessingOnStream(0);
                }

                if (13 == key)
                {
                    Console.WriteLine("disable processing 0");
                    video_worker.disableProcessingOnStream(0);
                }


                if ('r' == key)
                {
                    Console.WriteLine("reset trackerOnStream");
                    video_worker.resetTrackerOnStream(0);
                }


                // check exceptions in callbacks
                video_worker.checkExceptions();
            }

            // force free resources
            // otherwise licence error may occur
            // when create sdk object in next time
            service.Dispose();
            video_worker.Dispose();
        }
        catch (Exception e)
        {
            Console.WriteLine("video_recognition_show exception catched: '{0}'", e.ToString());
            return(1);
        }

        return(0);
    }
Example #10
0
        /// <summary>
        /// Classical Multidimensional Scaling: reconstructs 2-D city positions
        /// from the pairwise distance matrix <c>CityDistance</c> and plots them
        /// in a window, labelled with <c>CityNames</c>.
        /// </summary>
        public void Run()
        {
            // creates distance matrix
            int size = CityDistance.GetLength(0);
            Mat t = new Mat(size, size, MatType.CV_64FC1, CityDistance);
            // adds Torgerson's additive constant to t
            double torgarson = Torgerson(t);
            t += torgarson;
            // squares all elements of t
            t = t.Mul(t);

            // centering matrix G
            Mat g = CenteringMatrix(size);
            // calculates inner product matrix B = -G t G^T / 2
            Mat b = g * t * g.T() * -0.5;
            // calculates eigenvalues and eigenvectors of B
            Mat values = new Mat();
            Mat vectors = new Mat();
            Cv2.Eigen(b, values, vectors);
            // Negative eigenvalues are numerical noise; clamp them to zero so
            // the square roots taken below are well-defined.
            for (int r = 0; r < values.Rows; r++)
            {
                if (values.Get<double>(r) < 0)
                    values.Set<double>(r, 0);
            }

            //Console.WriteLine(values.Dump());

            // multiplies sqrt(eigenvalue) by eigenvector
            // (only the top two eigenvectors are kept: a 2-D embedding)
            Mat result = vectors.RowRange(0, 2);
            {
                var at = result.GetGenericIndexer<double>();
                for (int r = 0; r < result.Rows; r++)
                {
                    for (int c = 0; c < result.Cols; c++)
                    {
                        at[r, c] *= Math.Sqrt(values.Get<double>(r));
                    }
                }
            }

            // scaling: normalize coordinates into [0, 800] to fit the canvas
            Cv2.Normalize(result, result, 0, 800, NormTypes.MinMax);

            // opens a window
            using (Mat img = Mat.Zeros(600, 800, MatType.CV_8UC3))
            using (Window window = new Window("City Location Estimation"))
            {
                var at = result.GetGenericIndexer<double>();
                for (int c = 0; c < size; c++)
                {
                    double x = at[0, c];
                    double y = at[1, c];
                    // shrink and offset so points/labels near the border stay visible
                    x = x * 0.7 + img.Width * 0.1;
                    y = y * 0.7 + img.Height * 0.1;
                    img.Circle((int)x, (int)y, 5, Scalar.Red, -1);
                    Point textPos = new Point(x + 5, y + 10);
                    img.PutText(CityNames[c], textPos, HersheyFonts.HersheySimplex, 0.5, Scalar.White);
                }
                window.Image = img;
                Cv2.WaitKey();
            }
        }
Example #11
0
        /// <summary>
        /// DFX extraction demo: reads a video and pre-computed face-tracking
        /// JSON, feeds frames and faces to a DFX collector chunk by chunk, and
        /// renders the tracked regions while extracting. Returns 0 on success,
        /// 1 on initialization failure (or ParseArgs' own code).
        /// </summary>
        public static int Main(string[] args)
        {
            // Parse input arguments
            string videoPath, facePath, studyPath, output;
            int    parseResult = ParseArgs(args, out videoPath, out facePath, out studyPath, out output);

            // NOTE(review): 2 appears to be ParseArgs' "proceed" code; any
            // other value is returned to the shell as-is — confirm against
            // ParseArgs.
            if (parseResult != 2)
            {
                return(parseResult);
            }

            // Create a Factory object
            var factory = new Dfx.Sdk.Factory();

            Console.WriteLine($"Created DFX Factory: {factory.Version}");

            // Initialize a study
            if (!factory.InitializeStudyFromFile(studyPath))
            {
                Console.WriteLine($"DFX study initialization failed: {factory.LastErrorMessage}");
                return(1);
            }
            Console.WriteLine($"Created study from {studyPath}");

            // Create a collector
            var collector = factory.CreateCollector();

            if (collector.CurrentState == Dfx.Sdk.Collector.State.ERROR)
            {
                Console.WriteLine($"Collector creation failed: {collector.LastErrorMessage}");
                Console.ReadKey();
                return(1);
            }
            Console.WriteLine("Created collector");

            // Load the face tracking data
            var jsonFaces = LoadJsonFaces(facePath);

            // Load video file (or stream of images)
            var videocap      = Cv.VideoCapture.FromFile(videoPath);
            var videoFileName = Path.GetFileName(videoPath);

            // Set target FPS and chunk duration
            double    targetFps             = videocap.Get(Cv.CaptureProperty.Fps);
            double    videoFrameCount       = videocap.Get(Cv.CaptureProperty.FrameCount);
            const int chunkDuration_s       = 5;
            const int KLUDGE                = 1;
            double    chunkFrameCount       = Math.Ceiling(chunkDuration_s * targetFps + KLUDGE);
            ulong     numberChunks          = (ulong)Math.Ceiling(videoFrameCount / chunkFrameCount); // Ask for more chunks than needed
            double    durationOfOneFrame_ns = 1000_000_000.0 / targetFps;

            collector.TargetFps     = (float)targetFps;
            collector.ChunkDuration = chunkDuration_s;
            collector.NumberChunks  = numberChunks;

            Console.WriteLine($"    mode: {factory.Mode}");
            Console.WriteLine($"    number chunks: {collector.NumberChunks}");
            Console.WriteLine($"    chunk duration: {collector.ChunkDuration}");
            foreach (var constraint in collector.GetEnabledConstraints())
            {
                Console.WriteLine($"    enabled constraint: {constraint}");
            }

            // Start collection
            collector.StartCollection();

            // Start reading frames and adding to collector
            uint frameNumber = 0;
            bool success     = false;

            using (var window = new Cv.Window("capture"))
            {
                Cv.Mat image = new Cv.Mat();
                while (true)
                {
                    bool ret = videocap.Read(image);
                    if (!ret || image.Empty())
                    {
                        // Video ended, so grab what should be the last, possibly truncated chunk
                        var chunkData = collector.ChunkData;
                        if (chunkData != null)
                        {
                            var chunkPayload = chunkData.Payload;
                            //if (output != null)
                            //    savePayload(chunkPayload, output);
                            Console.WriteLine($"Got chunk with {chunkPayload}");
                        }
                        else
                        {
                            Console.WriteLine("Got empty chunk");
                        }
                        success = true;
                        break;
                    }

                    // Create a Dfx VideoFrame (BGR, 8-bit, 3-channel; the
                    // timestamp is synthesized from the frame index and FPS)
                    using (Dfx.Sdk.VideoFrame videoFrame = new Dfx.Sdk.VideoFrame((ushort)image.Rows,
                                                                                  (ushort)image.Cols,
                                                                                  Dfx.Sdk.PixelType.TYPE_8UC3,
                                                                                  image.Channels() * image.Cols,
                                                                                  image.Data,
                                                                                  Dfx.Sdk.ChannelOrder.BGR,
                                                                                  (ulong)(frameNumber * durationOfOneFrame_ns),
                                                                                  frameNumber))
                    {
                        // NOTE(review): frameNumber is incremented before the
                        // JSON lookup, so jsonFaces is presumably keyed
                        // 1-based — confirm against the face-data producer.
                        frameNumber++;

                        // Create a Dfx Frame from the VideoFrame
                        var frame = collector.CreateFrame(videoFrame);

                        // Add the Dfx Face to the Dfx Frame
                        var jsonFace = jsonFaces[frameNumber.ToString()];
                        var face     = new Dfx.Sdk.Face((string)jsonFace["id"]);
                        face.PoseValid = (bool)jsonFace["poseValid"];
                        face.Detected  = (bool)jsonFace["detected"];
                        face.SetRect((ushort)jsonFace["rect.x"], (ushort)jsonFace["rect.y"], (ushort)jsonFace["rect.w"], (ushort)jsonFace["rect.h"]);
                        foreach (JProperty entry in jsonFace["points"])
                        {
                            face.AddPosePoint(entry.Name, new Dfx.Sdk.PosePoint((float)entry.Value["x"],
                                                                                (float)entry.Value["y"],
                                                                                0,
                                                                                (bool)entry.Value["valid"],
                                                                                (bool)entry.Value["estimated"],
                                                                                (float)entry.Value["quality"]));
                        }
                        frame.AddFace(face);

                        // Add a marker to the 1000th dfx_frame
                        if (frameNumber == 1000)
                        {
                            frame.AddMarker("This is the 1000th frame");
                        }

                        // Do the extraction
                        collector.DefineRegions(frame);
                        var result = collector.ExtractChannels(frame);

                        // Grab a chunk and check if we are finished
                        if (result == Dfx.Sdk.Collector.State.CHUNKREADY || result == Dfx.Sdk.Collector.State.COMPLETED)
                        {
                            var chunkData = collector.ChunkData;
                            if (chunkData != null)
                            {
                                var chunkPayload = chunkData.Payload;
                                //if (output != null)
                                //    savePayload(chunkPayload, output);
                                Console.WriteLine($"Got chunk with {chunkPayload}");
                            }
                            else
                            {
                                Console.WriteLine("Got empty chunk");
                            }
                            if (result == Dfx.Sdk.Collector.State.COMPLETED)
                            {
                                Console.WriteLine($"{nameof(Dfx.Sdk.Collector.State.COMPLETED)} at frame {frameNumber}");
                                success = true;
                                break;
                            }
                        }

                        // Render (always on; the constant condition exists so
                        // rendering can be switched off by editing one token)
                        if (true)
                        {
                            // outline every region the SDK marked for drawing
                            foreach (var faceID in frame.FaceIdentifiers)
                            {
                                foreach (var regionID in frame.GetRegionNames(faceID))
                                {
                                    if (frame.GetRegionIntProperty(faceID, regionID, "draw") != 0)
                                    {
                                        var dfxpolygon = frame.GetRegionPolygon(faceID, regionID);
                                        var cvpolygon  = new List <Cv.Point>();
                                        foreach (var point in dfxpolygon)
                                        {
                                            cvpolygon.Add(new Cv.Point(point.X, point.Y));
                                        }
                                        var cvpolygons = new List <List <Cv.Point> >();
                                        cvpolygons.Add(cvpolygon);
                                        Cv.Cv2.Polylines(image, cvpolygons, isClosed: true, color: Cv.Scalar.Cyan, thickness: 1, lineType: Cv.LineTypes.AntiAlias);
                                    }
                                }
                            }

                            string msg = $"Extracting from {videoFileName} - frame {frameNumber} of {videoFrameCount}";
                            Cv.Cv2.PutText(image, msg, org: new Cv.Point(10, 30), fontFace: Cv.HersheyFonts.HersheyPlain, fontScale: 1, color: Cv.Scalar.Black, thickness: 1, lineType: Cv.LineTypes.AntiAlias);

                            window.ShowImage(image);
                            // 'q' aborts the extraction early
                            if (Cv.Cv2.WaitKey(1) == 'q')
                            {
                                success = false;
                                break;
                            }
                        }
                    }
                }
            }

            if (success)
            {
                Console.WriteLine("Collection finished completely. Press any key to exit...");
            }
            else
            {
                Console.WriteLine("Collection interrupted or failed. Press any key to exit...");
            }

            // When everything done, release the capture
            videocap.Release();

            Console.ReadKey();

            return(0);
        }
Example #12
-4
 public void Run()
 {
     // Foreground extraction demo: run each movie frame through a MOG
     // background subtractor and show the source and the mask side by side.
     using (var capture = new VideoCapture(FilePath.Movie.Bach))
     using (var mog = BackgroundSubtractorMOG.Create())
     using (var windowSrc = new Window("src"))
     using (var windowDst = new Window("dst"))
     {
         var src = new Mat();
         var foreground = new Mat();
         for (;;)
         {
             capture.Read(src);
             if (src.Empty())
             {
                 break;
             }

             // learning rate 0.01: the background model adapts slowly
             mog.Run(src, foreground, 0.01);

             windowSrc.Image = src;
             windowDst.Image = foreground;
             Cv2.WaitKey(50);
         }
     }
 }
Example #13
-12
        public void Run()
        {
            // Converts a movie to grayscale Canny edges written to out.avi,
            // then plays the result back.
            const string OutVideoFile = "out.avi";

            // Opens MP4 file (ffmpeg is probably needed).
            // FIX: the capture was never disposed; 'using var' keeps it alive
            // through the playback section (capture.Fps is read there) and
            // releases it on exit.
            using var capture = new VideoCapture(FilePath.Movie.Bach);

            // Read movie frames and write them to VideoWriter
            Size dsize = new Size(640, 480);
            using (VideoWriter writer = new VideoWriter(OutVideoFile, -1, capture.Fps, dsize))
            {
                Console.WriteLine("Converting each movie frames...");
                using Mat frame = new Mat();
                while (true)
                {
                    // Read image
                    capture.Read(frame);
                    if (frame.Empty())
                        break;

                    // simple progress indicator on one console line
                    Console.CursorLeft = 0;
                    Console.Write("{0} / {1}", capture.PosFrames, capture.FrameCount);

                    // grayscale -> canny -> resize
                    // FIX: the intermediate Mats were re-allocated and leaked
                    // on every iteration; dispose them per frame.
                    using Mat gray = new Mat();
                    using Mat canny = new Mat();
                    using Mat dst = new Mat();
                    Cv2.CvtColor(frame, gray, ColorConversionCodes.BGR2GRAY);
                    Cv2.Canny(gray, canny, 100, 180);
                    Cv2.Resize(canny, dst, dsize, 0, 0, InterpolationFlags.Linear);
                    // Write mat to VideoWriter
                    writer.Write(dst);
                }
                Console.WriteLine();
            }

            // Watch result movie
            using (VideoCapture capture2 = new VideoCapture(OutVideoFile))
            using (Window window = new Window("result"))
            {
                int sleepTime = (int)(1000 / capture.Fps);

                using Mat frame = new Mat();
                while (true)
                {
                    capture2.Read(frame);
                    if (frame.Empty())
                        break;

                    window.ShowImage(frame);
                    Cv2.WaitKey(sleepTime);
                }
            }
        }