Example #1
        private void timer1_Tick(object sender, EventArgs e)
        {
            vidFrame++;
            if (vidFrame >= video.GetCaptureProperty(CapProp.FrameCount))
            {
                timer1.Enabled = false;
            }
            else
            {
                var frame = video.QueryFrame();
                Image<Bgr, byte> image = frame.ToImage<Bgr, byte>();

                if (typs == 0)
                {
                    imageBox1.Image = image;
                }
                else if (background != null && typs == 1)
                {
                    imageBox1.Image = logic.diffusal(image, background);
                }
                else if (typs == 2)
                {
                    var foregroundMask = image.Convert<Gray, byte>().CopyBlank();
                    backgrSubstr.Apply(image.Convert<Gray, byte>(), foregroundMask);
                    var filtrMask = logic.FilterMask(foregroundMask, image);
                    imageBox1.Image = filtrMask;
                }
            }
        }
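
A note on Examples #1 to #3: they rely on fields and wiring that the snippets leave out (a timer, a capture source, a MOG2 subtractor). A minimal setup sketch, assuming Emgu CV and the field names from Example #1; the file path and parameter values are placeholders, not the original source:

        // assumed setup for Examples #1 to #3 (a sketch, not the original source)
        private VideoCapture video;                     // video file or camera source
        private BackgroundSubtractorMOG2 backgrSubstr;  // MOG2 background subtractor
        private Image<Bgr, byte> background;            // reference frame for mode 1
        private int vidFrame;                           // current frame counter
        private int typs;                               // 0 = raw, 1 = diff, 2 = subtractor

        private void Form1_Load(object sender, EventArgs e)
        {
            video = new VideoCapture("input.mp4");      // hypothetical path
            backgrSubstr = new BackgroundSubtractorMOG2(history: 500, varThreshold: 16, shadowDetection: true);
            timer1.Interval = (int)(1000.0 / video.GetCaptureProperty(CapProp.Fps));
            timer1.Enabled = true;
        }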
Example #2
 private void vtimer_Tick(object sender, EventArgs e)
 {
     vframe++;
     if (vframe >= capture.GetCaptureProperty(CapProp.FrameCount))
     {
         vtimer.Enabled = false;
     }
     else
     {
         var frame = capture.QueryFrame();
         Image<Bgr, byte> image = frame.ToImage<Bgr, byte>();

         if (mode == 0)
         {
             IMG1.Image = image;
         }
         else if (background != null && mode == 1)
         {
             IMG1.Image = fil.diffusal(image, background);
         }
         else if (mode == 2)
         {
             var foregroundMask = image.Convert<Gray, byte>().CopyBlank();
             subtractor.Apply(image.Convert<Gray, byte>(), foregroundMask);
             var filteredMask = fil.FilterMask(foregroundMask, image);
             IMG1.Image = filteredMask;
         }
     }
 }
Example #3
        private void ProcessFrame(object sender, EventArgs e)
        {
            var frame = new Mat();

            capture.Retrieve(frame);
            imageBox1.Image = frame;

            if (typs == 0)
            {
                imageBox2.Image = frame;
            }

            frame = new Mat();
            capture.Retrieve(frame);
            Image<Bgr, byte> image = frame.ToImage<Bgr, byte>();

            if (bg != null && typs == 1)
            {
                imageBox2.Image = diffusal(image, bg);
            }
            else if (typs == 2)
            {
                var foregroundMask = image.Convert<Gray, byte>().CopyBlank();
                subtractor.Apply(image.Convert<Gray, byte>(), foregroundMask);
                var filteredMask = FilterMask(foregroundMask, image);
                imageBox2.Image = filteredMask;
            }
        }
Example #4
        // Background Subtraction From the Given Background and Input Image
        public void removebackground(string filepath = null)
        {
            CvInvoke.Imshow("1- Background Image", bgImage);
            CvInvoke.Imshow("2- Forground Image", img);
            Image <Gray, byte>       output       = new Image <Gray, byte>(bgImage.Width, bgImage.Height);
            BackgroundSubtractorMOG2 bgsubtractor = new BackgroundSubtractorMOG2(varThreshold: 100, shadowDetection: false);

            bgsubtractor.Apply(bgImage, output);
            bgsubtractor.Apply(img, output);
            pictureViewBox.Image = output;

            CvInvoke.Imshow("3- Background Subtracted", output);
            //output.Canny(100,100);

            CvInvoke.Erode(output, output, null, new System.Drawing.Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));

            CvInvoke.Imshow("4- After Applying Erode", output);
            CvInvoke.Dilate(output, output, null, new System.Drawing.Point(-1, -1), 5, BorderType.Reflect, default(MCvScalar));

            CvInvoke.Imshow("5- After Dilation", output);

            // Write the silhouette output to the file
            if (filepath != null && saveResults == true)
            {
                CvInvoke.Imwrite(outputFolder + "bg_subtract_" + filepath, output);
            }

            // finding the Bounding Box of the Person
            frm = new PersonFrame();
            Rectangle rec = frm.findBoundry(output);

            // Using Thinning Algorithm on Silhouette
            Image<Gray, byte> thinOutput = new Image<Gray, byte>(output.Width, output.Height);

            XImgprocInvoke.Thinning(output, thinOutput, ThinningTypes.ZhangSuen);
            pictureViewBox.Image = thinOutput.Not().Not();
            CvInvoke.Imshow("6- After Thinning Zhang Suen", thinOutput);
            // Write the thinned Image to the file
            if (filepath != null && saveResults == true)
            {
                CvInvoke.Imwrite(outputFolder + "thinned_" + filepath, thinOutput.Not().Not());
            }

            // drawing bounding Box of the person
            CvInvoke.Rectangle(thinOutput, rec, new Rgb(Color.White).MCvScalar, 2);
            CvInvoke.Imshow("Person Bounding Box", thinOutput);
            // drawing the middle line of the Person
            //CvInvoke.Line(thinOutput, frm.middle_line.p1, frm.middle_line.p2, new Rgb(Color.White).MCvScalar, 2);

            // Display the Image
            //CvInvoke.Imshow("Person Fame", thinOutput);

            // Applying Hough Line Transformation
            Hough(thinOutput, filepath);

            img.Dispose();
            output.Dispose();
            thinOutput.Dispose();
        }
Example #5
        private Image<Gray, byte> GetMask(Image<Gray, byte> frame)
        {
            var foregroundMask = frame.CopyBlank();

            subtractor.Apply(frame, foregroundMask);
            return FilterMask(foregroundMask);
        }
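
Several of these examples call a FilterMask helper that is defined elsewhere in their projects and never shown. As a rough sketch of what such a helper typically does (an assumption; the original implementations may differ), assuming the usual Emgu.CV namespaces:

        // hypothetical FilterMask (assumption, not the original): threshold away
        // MOG2 shadow pixels, then erode/dilate to suppress speckle noise
        private Image<Gray, byte> FilterMask(Image<Gray, byte> mask)
        {
            var cleaned = mask.ThresholdBinary(new Gray(127), new Gray(255));
            cleaned = cleaned.Erode(2);   // Erode/Dilate return new images
            cleaned = cleaned.Dilate(4);
            return cleaned;
        }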
Example #6
        public Image<Bgr, byte> obl(Mat frame, int tb1)
        {
            Image<Gray, byte> cur = frame.ToImage<Gray, byte>();

            var foregroundMask = cur.CopyBlank();

            foregroundMask = FilterMask(foregroundMask);

            subtractor.Apply(cur, foregroundMask);

            foregroundMask._ThresholdBinary(new Gray(100), new Gray(255));

            // in-place erode/dilate; the non-underscore Erode/Dilate return new images and would discard the result
            foregroundMask._Erode(3);
            foregroundMask._Dilate(4);

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(foregroundMask, contours, null, RetrType.External, ChainApproxMethod.ChainApproxTc89L1);

            var output = frame.ToImage<Bgr, byte>().Copy();

            for (int i = 0; i < contours.Size; i++)
            {
                if (CvInvoke.ContourArea(contours[i]) > tb1) // ignore small contours
                {
                    Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
                    output.Draw(rect, new Bgr(Color.Blue), 1);
                }
            }
            return output;
        }
Example #7
        private Image<Bgr, byte> Process(Mat frame)
        {
            Image<Gray, byte> cur = frame.ToImage<Gray, byte>();

            //Image<Gray, byte> diff = bg.AbsDiff(cur);

            var foregroundMask = cur.CopyBlank();

            foregroundMask = FilterMask(foregroundMask);

            subtractor.Apply(cur, foregroundMask);

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(foregroundMask, contours, null, RetrType.External,
                                  ChainApproxMethod.ChainApproxTc89L1);
            var output = frame.ToImage<Bgr, byte>().Copy();

            for (int i = 0; i < contours.Size; i++)
            {
                if (CvInvoke.ContourArea(contours[i]) > Area)
                {
                    Rectangle boundingRect = CvInvoke.BoundingRectangle(contours[i]);
                    output.Draw(boundingRect, new Bgr(Color.GreenYellow), 2);
                }
            }
            return output;
        }
Example #8
        public CameraTrackingUpdateReturnModel Update()
        {
            // capture frame
            Mat frame = _cameraCapture.QueryFrame();

            // filter out noise
            Mat smoothedFrame = new Mat();

            CvInvoke.GaussianBlur(
                frame,
                smoothedFrame,
                new Size(this.frameBlurStrength, this.frameBlurStrength),
                1);

            // get mask for preview
            Mat foregroundMask = new Mat();

            _fgDetector.Apply(smoothedFrame, foregroundMask);

            this.lastFrame = frame;
            this.lastMask = foregroundMask;

            return new CameraTrackingUpdateReturnModel()
            {
                Frame = frame,
                Mask = foregroundMask
            };
        }
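
One caveat about Example #8: OpenCV's GaussianBlur requires odd kernel dimensions, so the call only succeeds when frameBlurStrength is odd. A defensive variant, mirroring the clamp Example #21 applies to its trackbar value, would be:

            // force the blur kernel size to be odd before calling GaussianBlur
            int k = this.frameBlurStrength % 2 == 0 ? this.frameBlurStrength + 1 : this.frameBlurStrength;
            CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(k, k), 1);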
Example #9
        /*--- Second Part ---*/

        public Image<Gray, byte> ForegroundMask(Image<Gray, byte> grayImage)
        {
            var foregroundMask = grayImage.CopyBlank();

            substractor.Apply(grayImage, foregroundMask);

            return foregroundMask;
        }
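
A minimal usage sketch for Example #9, assuming a capture field like the ones in the earlier examples:

            // assumed usage: grab a frame, convert it to grayscale, fetch the mask
            Mat frame = capture.QueryFrame();
            Image<Gray, byte> mask = ForegroundMask(frame.ToImage<Gray, byte>());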
Example #10
        public Image<Bgr, byte> timerVideo()
        {
            var frame = capture.QueryFrame();

            sourceImage = frame.ToImage<Bgr, byte>(); // the image processed by the Processing function is set to this frame
            //var videoImage = Processing(); // apply the filter to the final image by calling the function
            capture.Retrieve(frame);
            Image<Gray, byte> cur = frame.ToImage<Gray, byte>();

            var foregroundMask = cur.CopyBlank();

            subtractor.Apply(cur, foregroundMask);

            foregroundMask._ThresholdBinary(new Gray(253), new Gray(255));

            // in-place erode/dilate; the non-underscore Erode/Dilate return new images and would discard the result
            foregroundMask._Erode(3);
            foregroundMask._Dilate(4);
            FilterMask(foregroundMask);

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(
                foregroundMask,
                contours,
                null,
                RetrType.External,      // retrieve only the external contours
                ChainApproxMethod.ChainApproxTc89L1);

            var output = frame.ToImage<Bgr, byte>().Copy();

            for (int i = 0; i < contours.Size; i++)
            {
                if (CvInvoke.ContourArea(contours[i], false) > 700) // ignore small contours
                {
                    Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
                    output.Draw(rect, new Bgr(Color.GreenYellow), 1);
                }
            }

            frameCounter++;
            return output;
        }
Example #11
 private void ProcessVideo(object sender, EventArgs e)
 {
     FrameCounter++;
     try
     {
         // Check the end of video
         if (FrameCounter == TotalFrames)
         {
             capturedVideo.Dispose();
             return;
         }
         capturedVideo.Retrieve(originalFrame);
         // Check which frames to show
         if (playingState == 0)
         {
             displayingFrame = originalFrame.Clone();
         }
         else if (playingState == 1)
         {
             CvInvoke.CvtColor(originalFrame, originalFrame, Emgu.CV.CvEnum.ColorConversion.Bgra2Gray, 1);
             mog.Apply(originalFrame, thresholdedFrame);
             displayingFrame = thresholdedFrame.Clone();
         }
         else if (playingState == 2)
         {
             CvInvoke.CvtColor(originalFrame, originalFrame, Emgu.CV.CvEnum.ColorConversion.Bgra2Gray, 1);
             mog2.Apply(originalFrame, thresholdedFrame, -1);
             displayingFrame = thresholdedFrame.Clone();
         }
         // Marshal back onto the UI thread to update the displayed image
         this.Dispatcher.Invoke(() =>
         {
             BitmapImage bitmapImage = new BitmapImage();
             using (MemoryStream memoryLocation = new MemoryStream())
             {
                 displayingFrame.Bitmap.Save(memoryLocation, ImageFormat.Png);
                 memoryLocation.Position = 0;
                 bitmapImage.BeginInit();
                 bitmapImage.StreamSource = memoryLocation;
                 bitmapImage.CacheOption  = BitmapCacheOption.OnLoad;
                 bitmapImage.EndInit();
             }
             ImageViewer.Source = bitmapImage;
         });
     }
     catch (Exception err)
     {
         System.Windows.MessageBox.Show("Something went wrong!\n" + err.ToString(), "Error!", MessageBoxButton.OK, MessageBoxImage.Asterisk);
     }
 }
Example #12
    void CamUpdate()
    {
        CvUtil.GetWebCamMat(webCamTexture, ref mat);

        mog2.Apply(mat, fg, 0.05f);
        Cv2.GaussianBlur(fg, fg, new Size(21, 21), 0);
        Cv2.Threshold(fg, fg, 30, 255, ThresholdTypes.Binary);
        Cv2.Dilate(fg, fg, nm, default(Point?), 2);
        Cv2.CvtColor(fg, fg, ColorConversionCodes.GRAY2BGRA);
        Cv2.Add(mat, fg, fg);

        CvConvert.MatToTexture2D(fg, ref tex);
        rawImage.texture = tex;
    }
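
Example #12 (Unity with OpenCvSharp) leans on fields declared elsewhere in the class. A plausible set of declarations, offered as an assumption rather than the original code:

    // assumed field declarations (names match the example above)
    BackgroundSubtractorMOG2 mog2 = BackgroundSubtractorMOG2.Create(500, 16, false);
    Mat mat = new Mat();   // current webcam frame
    Mat fg = new Mat();    // foreground mask, later composited onto the frame
    Mat nm = Cv2.GetStructuringElement(MorphShapes.Ellipse, new Size(3, 3)); // dilation kernel
    Texture2D tex;         // output texture pushed to rawImage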
Example #13
        public void MotionDetector(SoftwareBitmap input, SoftwareBitmap output, Algorithm algorithm)
        {
            if (algorithm.AlgorithmName == "MotionDetector")
            {
                using Mat mInput     = SoftwareBitmap2Mat(input);
                using Mat mOutput    = new Mat(mInput.Rows, mInput.Cols, MatType.CV_8UC4);
                using Mat fgMaskMOG2 = new Mat(mInput.Rows, mInput.Cols, MatType.CV_8UC4);
                using Mat temp       = new Mat(mInput.Rows, mInput.Cols, MatType.CV_8UC4);

                mog2.Apply(mInput, fgMaskMOG2, (double)algorithm.AlgorithmProperties[0].CurrentValue);
                Cv2.CvtColor(fgMaskMOG2, temp, ColorConversionCodes.GRAY2BGRA);

                using Mat element = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(3, 3));
                Cv2.Erode(temp, temp, element);
                temp.CopyTo(mOutput);
                Mat2SoftwareBitmap(mOutput, output);
            }
        }
Example #14
        private IEnumerable<Rect> DetectMotion(Mat frame)
        {
            Mat fgmask = new Mat();

            _segmentor.Apply(frame, fgmask);
            if (fgmask.Empty())
            {
                yield break;
            }

            Cv2.Threshold(fgmask, fgmask, 25, 255, ThresholdTypes.Binary);
            int noiseSize = 9;
            Mat kernel    = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(noiseSize, noiseSize));

            Cv2.Erode(fgmask, fgmask, kernel);
            kernel = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(noiseSize, noiseSize));
            Cv2.Dilate(fgmask, fgmask, kernel, new Point(-1, -1), 3);
            Cv2.FindContours(fgmask, out Point[][] contours, out HierarchyIndex[] hierarchies, RetrievalModes.Tree, ContourApproximationModes.ApproxSimple);

            // the original snippet is truncated here; a plausible completion (an assumption)
            // yields the bounding rectangle of each sufficiently large contour
            foreach (Point[] contour in contours)
            {
                if (Cv2.ContourArea(contour) > 500) // hypothetical minimum area
                {
                    yield return Cv2.BoundingRect(contour);
                }
            }
        }
Example #15
        public Mat DetectForeground(Mat image, int nFrames)
        {
            fgDetector.Apply(image, fgMask0);

            if (regionOfInterest != null)
            {
                Cv2.BitwiseAnd(fgMask0, regionOfInterest, fgMask);
            }

            if (nFrames < N_FRAMES_TO_LEARN)
            {
                return null;
            }
            else if (regionOfInterest != null)
            {
                return fgMask;
            }
            else
            {
                return fgMask0;
            }
        }
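
Example #15's regionOfInterest is an optional binary mask built elsewhere. A minimal construction sketch (OpenCvSharp, with an assumed frame size and watch rectangle):

            // hypothetical ROI mask: white inside the watched rectangle, black elsewhere
            Mat regionOfInterest = new Mat(480, 640, MatType.CV_8UC1, Scalar.Black);
            Cv2.Rectangle(regionOfInterest, new Rect(100, 100, 400, 300), Scalar.White, -1); // -1 = filled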
Example #16
        public Image<Bgr, byte> ReturnMovingArea(Mat frame, int minArea, bool box)
        {
            Image<Gray, byte> cur = frame.ToImage<Gray, byte>();

            var foregroundMask = cur.CopyBlank();

            foregroundMask = FilterMask(foregroundMask);

            subtractor.Apply(cur, foregroundMask);

            foregroundMask._ThresholdBinary(new Gray(100), new Gray(255));

            // in-place erode/dilate; the non-underscore Erode/Dilate return new images and would discard the result
            foregroundMask._Erode(3);
            foregroundMask._Dilate(4);
            var hierarchy = new Mat();
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            CvInvoke.FindContours(foregroundMask, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxTc89L1);

            var output = frame.ToImage<Bgr, byte>().Copy();

            for (int i = 0; i < contours.Size; i++)
            {
                if (CvInvoke.ContourArea(contours[i]) > minArea) // ignore small contours
                {
                    if (box)
                    {
                        Rectangle rect = CvInvoke.BoundingRectangle(contours[i]);
                        output.Draw(rect, new Bgr(Color.LawnGreen), 2);
                    }
                    else
                    {
                        //CvInvoke.DrawContours(output, contours[i], -1, new MCvScalar(0, 255, 0), 2, LineType.AntiAlias, hierarchy, 1);
                        CvInvoke.Polylines(output, contours[i], false, new MCvScalar(0, 255, 0), 2, LineType.Filled);
                    }
                }
            }
            return output;
        }
Example #17
        private void ProcessFrameMP4(object sender, EventArgs e)
        {
            px = new Point(px1, px2);
            py = new Point(py1, py2);

            if (cap != null)
            {
                cap.Retrieve(frame, 0);
                currentframe = frame.ToImage<Bgr, byte>();


                Mat mask = new Mat();
                sub.Apply(currentframe, mask);

                Mat kernelOp = new Mat();
                Mat kernelCl = new Mat();
                Mat kernelEl = new Mat();
                Mat Dilate   = new Mat();
                kernelOp = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(-1, -1));
                kernelCl = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(11, 11), new Point(-1, -1));
                var element = CvInvoke.GetStructuringElement(ElementShape.Cross, new Size(3, 3), new Point(-1, -1));

                CvInvoke.GaussianBlur(mask, mask, new Size(13, 13), 1.5);
                CvInvoke.MorphologyEx(mask, mask, MorphOp.Open, kernelOp, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
                CvInvoke.MorphologyEx(mask, mask, MorphOp.Close, kernelCl, new Point(-1, -1), 1, BorderType.Default, new MCvScalar());
                CvInvoke.Dilate(mask, mask, element, new Point(-1, -1), 1, BorderType.Reflect, default(MCvScalar));
                CvInvoke.Threshold(mask, mask, 127, 255, ThresholdType.Binary);

                detect.Detect(mask.ToImage<Gray, byte>(), blobs);
                blobs.FilterByArea(500, 20000);
                tracks.Update(blobs, 20.0, 1, 10);

                Image<Bgr, byte> result = new Image<Bgr, byte>(currentframe.Size);
                using (Image<Gray, Byte> blobMask = detect.DrawBlobsMask(blobs))
                {
                    frame.CopyTo(result, blobMask);
                }
                CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 0, 255), 2);

                foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                {
                    if (pair.Value.Inactive == 0) //only draw the active tracks.
                    {
                        int cx = Convert.ToInt32(pair.Value.Centroid.X);
                        int cy = Convert.ToInt32(pair.Value.Centroid.Y);

                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detect.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, FontFace.HersheySimplex, 0.5, color);
                        currentframe.Draw(pair.Value.BoundingBox, new Bgr(0, 0, 255), 1);
                        Point[] contour = b.GetContour();
                        //result.Draw(contour, new Bgr(0, 0, 255), 1);

                        Point center = new Point(cx, cy);
                        CvInvoke.Circle(currentframe, center, 1, new MCvScalar(255, 0, 0), 2);

                        if (center.Y <= px.Y + 10 && center.Y > py.Y - 10 && center.X <= py.X && center.X > px.X)
                        {
                            if (pair.Key.ToString() != "")
                            {
                                if (!carid.Contains(pair.Key.ToString()))
                                {
                                    carid.Add(pair.Key.ToString());
                                    if (carid.Count == 20)
                                    {
                                        carid.Clear();
                                    }

                                    carcount++;

                                    if (carcount != countBrd + 1 && carcount != countBrd + 2 && carcount != countBrd + 3 && carcount != countBrd + 4 && carcount != countBrd + 5)
                                    {
                                        //Json Logger
                                        Logs log = new Logs()
                                        {
                                            Date = DateTime.Now.ToString(),
                                            Id   = carcount
                                        };
                                        string strResultJson = JsonConvert.SerializeObject(log);
                                        File.AppendAllText(cfg.LogSavePath + @"\log.json", strResultJson + Environment.NewLine);
                                    }
                                }
                            }

                            CvInvoke.Line(currentframe, px, py, new MCvScalar(0, 255, 0), 2);
                        }
                    }
                }


                CvInvoke.PutText(currentframe, "Count :" + carcount.ToString(), new Point(10, 25), FontFace.HersheySimplex, 1, new MCvScalar(255, 0, 255), 2, LineType.AntiAlias);
                //Frame Rate
                double framerate = cap.GetCaptureProperty(CapProp.Fps);
                Thread.Sleep((int)(1000.0 / framerate));
                if (firstCount == false && carcount == countBrd)
                {
                    Image_Name = cfg.PhotoSavePath + @"\" + "Car" + DateTime.Now.ToString("dd-MM-yyyy-HH-mm-ss") + ".jpg"; // MM = month, HH = 24-hour clock; lowercase mm means minutes
                    currentframe.Save(Image_Name);
                    sendMail = new Thread(SendMail);
                    sendMail.Start();
                    firstCount = true;
                }



                if (isRecording)
                {
                    if (firstFrameTime != null)
                    {
                        writer.WriteVideoFrame(currentframe.Bitmap, DateTime.Now - firstFrameTime.Value);
                    }
                    else
                    {
                        writer.WriteVideoFrame(currentframe.Bitmap);
                        firstFrameTime = DateTime.Now;
                    }
                }

                //pictureBox1.SizeMode = PictureBoxSizeMode.StretchImage;
                pictureBox1.Image = currentframe.Bitmap;
            }
        }
Example #18
        // updates the display image - image processing happens here
        private void UpdateImage()
        {
            imageLbl.Text = "Image " + (_currImage + 1);

            var tempImg = _seqImages[_currImage].Copy();

            Image<Gray, Byte> fgMask = _seqImages[_currImage].CopyBlank();

            // apply the first image and the current image to the background subtractor
            // this helps to detect stationary objects while still taking into account the last frame
            _subMog2.Apply(_seqImages[0], fgMask);
            _subMog2.Apply(tempImg, fgMask);

            // makes a BGR display image so coloured lines can be drawn
            var dispImg = _seqImages[_currImage].Convert<Bgr, Byte>();

            // find contours based on the movement detected by background subtractor
            var maskContours = new VectorOfVectorOfPoint();

            // eroded once to remove noise, dilated 6 times to blobs of larger contours
            CvInvoke.FindContours(fgMask.Erode(1).Dilate(6), maskContours, null, Emgu.CV.CvEnum.RetrType.List, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone);

            // iterate through the contours, finding the largest one within thresholds
            int    biggestContour = -1;
            double contourSize = 0, currSize, similarity;

            for (int i = 0; i < maskContours.Size; i++)
            {
                currSize = CvInvoke.ContourArea(maskContours[i]);
                if (currSize > contourSize && currSize > 600 && currSize < 6000)
                {
                    biggestContour = i;
                    contourSize    = currSize;
                }
            }

            // if a contour exists
            if (biggestContour != -1)
            {
                // get the contours of the template for matching
                VectorOfVectorOfPoint tempContours = new VectorOfVectorOfPoint();
                CvInvoke.FindContours(new Image<Gray, Byte>(_templatePath).Not(), tempContours, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxSimple);

                // match contour with template using Hu moments
                similarity = CvInvoke.MatchShapes(maskContours[biggestContour], tempContours[0], Emgu.CV.CvEnum.ContoursMatchType.I1);

                // find the centre of the contour for drawing lines
                var newPosition = GetContourCentroid(maskContours[biggestContour]);
                // update the path history
                UpdateTrail(newPosition, similarity);

                // display the trail and similarities on the output image
                dispImg = DrawTrail(dispImg);

                // draw on a rotated bounding rect and the centre of the contour
                dispImg.Draw(CvInvoke.MinAreaRect(maskContours[biggestContour]), new Bgr(0, 255, 0), 2);
                dispImg.Draw(new CircleF(newPosition, 2), new Bgr(0, 255, 0), 2);
            }
            // if no contour exists
            else
            {
                dispImg = DrawTrail(dispImg);

                // end the current path
                if (_paths.Last().Points.Count > 0)
                {
                    _paths.Add(new PathHistory());
                }
            }

            // put this on the screen
            picBox.Image = dispImg.ToBitmap();
        }
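
Example #18 also calls project helpers (GetContourCentroid, UpdateTrail, DrawTrail) that aren't shown. A centroid helper of the kind it assumes can be sketched with spatial moments; this is an assumption, not the original code:

        // hypothetical helper: contour centroid from spatial moments
        private PointF GetContourCentroid(VectorOfPoint contour)
        {
            MCvMoments m = CvInvoke.Moments(contour);
            return new PointF((float)(m.M10 / m.M00), (float)(m.M01 / m.M00));
        }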
Example #19
        static void montion_detect_v2(System.Threading.EventWaitHandle quitEvent = null)
        {
            TcpClient client = new TcpClient();

            try
            {
                string root = System.IO.Path.Combine(System.Environment.GetEnvironmentVariable("FDHOME"), "AVIA", "frames");
                Regex  r    = new Regex(@"^ACK frame (.+)\s*$", RegexOptions.IgnoreCase);
                client.Connect(IPAddress.Loopback, 6280);
                NetworkStream            ns   = client.GetStream();
                byte[]                   cmd  = System.Text.Encoding.UTF8.GetBytes("QueryFrame\n");
                byte[]                   data = new byte[1024];
                BackgroundSubtractorMOG2 bgs  = new BackgroundSubtractorMOG2();
                bool monition            = false;
                Image <Bgr, Byte> bg_img = null;
                while (true)
                {
                    System.Threading.Thread.Sleep(500);
                    ns.Write(cmd, 0, cmd.Length);
                    int    read = ns.Read(data, 0, data.Length);
                    string str  = System.Text.Encoding.UTF8.GetString(data, 0, read);
                    Match  m    = r.Match(str);
                    if (m.Success)
                    {
                        Mat cm   = CvInvoke.Imread(System.IO.Path.Combine(root, m.Groups[1].Value));
                        Mat mask = new Mat();
                        bgs.Apply(cm, mask);
                        Image <Gray, Byte> g = mask.ToImage <Gray, Byte>();
                        Gray ga = g.GetAverage();
                        if (ga.MCvScalar.V0 > 11)
                        {
                            // motion
                            if (!monition)
                            {
                                Program.logIt("motion detected!");
                                Console.WriteLine("Detected montion.");
                                monition = true;
                                System.Threading.Thread.Sleep(500);
                            }
                        }
                        else
                        {
                            // no motion
                            if (monition)
                            {
                                Program.logIt("motion stopped!");
                                Console.WriteLine("Montion stopped.");
                                monition = false;
                                CvInvoke.Rotate(cm, cm, RotateFlags.Rotate90CounterClockwise);
                                if (bg_img == null)
                                {
                                    bg_img = cm.ToImage <Bgr, Byte>();
                                }
                                if (!handle_motion(cm.ToImage <Bgr, Byte>(), bg_img))
                                {
                                    bg_img = cm.ToImage <Bgr, Byte>();
                                }
                            }
                        }

                        GC.Collect();
                        if (System.Console.KeyAvailable)
                        {
                            ConsoleKeyInfo ki = Console.ReadKey();
                            if (ki.Key == ConsoleKey.Escape)
                            {
                                Program.logIt("Monitor will terminated by ESC pressed.");
                                break;
                            }
                        }
                        if (quitEvent != null)
                        {
                            if (quitEvent.WaitOne(0))
                            {
                                Program.logIt("Monitor will terminated by event set.");
                                break;
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Program.logIt(ex.Message);
                Program.logIt(ex.StackTrace);
            }
        }
Example #20
        static void montion_detect(System.Threading.EventWaitHandle quitEvent = null)
        {
            VideoCapture vc = new VideoCapture(0);

            if (vc.IsOpened)
            {
                bool b = vc.SetCaptureProperty(CapProp.Mode, 0);
                b = vc.SetCaptureProperty(CapProp.FrameHeight, 1944);
                b = vc.SetCaptureProperty(CapProp.FrameWidth, 2592);
                BackgroundSubtractorMOG2 bgs = new BackgroundSubtractorMOG2();
                bool monition            = false;
                Image <Bgr, Byte> bg_img = null;
                while (true)
                {
                    Mat cm = new Mat();
                    vc.Read(cm);
                    Mat mask = new Mat();
                    bgs.Apply(cm, mask);
                    Image <Gray, Byte> g = mask.ToImage <Gray, Byte>();
                    Gray ga = g.GetAverage();
                    if (ga.MCvScalar.V0 > 11)
                    {
                        // motion
                        if (!monition)
                        {
                            Program.logIt("motion detected!");
                            Console.WriteLine("Detected montion.");
                            monition = true;
                            System.Threading.Thread.Sleep(500);
                        }
                    }
                    else
                    {
                        // no motion
                        if (monition)
                        {
                            Program.logIt("motion stopped!");
                            Console.WriteLine("Montion stopped.");
                            monition = false;
                            vc.Read(cm);
                            CvInvoke.Rotate(cm, cm, RotateFlags.Rotate90CounterClockwise);
                            if (bg_img == null)
                            {
                                bg_img = cm.ToImage <Bgr, Byte>();
                            }
                            if (!handle_motion(cm.ToImage <Bgr, Byte>(), bg_img))
                            {
                                //bg_img = cm.ToImage<Bgr, Byte>();
                            }
                        }
                    }

                    GC.Collect();
                    if (System.Console.KeyAvailable)
                    {
                        ConsoleKeyInfo ki = Console.ReadKey();
                        if (ki.Key == ConsoleKey.Escape)
                        {
                            Program.logIt("Monitor will terminated by ESC pressed.");
                            break;
                        }
                    }
                    if (quitEvent != null)
                    {
                        if (quitEvent.WaitOne(0))
                        {
                            Program.logIt("Monitor will terminated by event set.");
                            break;
                        }
                    }
                }
            }
        }
Example #21
        private void Process(object sender, EventArgs e)
        {
            lblCarCount.Text = carcount.ToString();
            if (bs == null)
            {
                bs = new BackgroundSubtractorMOG2(250, 25, true);
            }
            if (videoCapture != null && isPlaying)
            {
                if (videoCapture.IsOpened)
                {
                    imgOriginal = videoCapture.QueryFrame();
                    if (imgOriginal != null)
                    {
                        imgResult = imgOriginal.Clone();
                        using (Mat tempimg = imgOriginal.Clone())
                        {
                            imgBS = new Mat();
                            int gaussianBlurSize = tbGaussianBlur.Value % 2 == 0 ? tbGaussianBlur.Value + 1 : tbGaussianBlur.Value;
                            CvInvoke.GaussianBlur(tempimg, tempimg, new Size(gaussianBlurSize, gaussianBlurSize), 1.0);
                            bs.Apply(tempimg, imgBS);
                            CvInvoke.Threshold(imgBS, imgBS, tbThreshold.Value, 255, ThresholdType.Binary);
                            switch (cbDilEroOrder.SelectedIndex)
                            {
                            case 0:
                                CvInvoke.Dilate(imgBS, imgBS, structuringElement, new Point(-1, -1), tbDilateIter.Value, BorderType.Default, new MCvScalar());
                                CvInvoke.Erode(imgBS, imgBS, structuringElement, new Point(-1, -1), tbErodeIter.Value, BorderType.Default, new MCvScalar());
                                break;

                            case 1:
                                CvInvoke.Erode(imgBS, imgBS, structuringElement, new Point(-1, -1), tbErodeIter.Value, BorderType.Default, new MCvScalar());
                                CvInvoke.Dilate(imgBS, imgBS, structuringElement, new Point(-1, -1), tbDilateIter.Value, BorderType.Default, new MCvScalar());
                                break;
                            }
                            VectorOfVectorOfPoint contours         = new VectorOfVectorOfPoint();
                            VectorOfVectorOfPoint filteredContours = new VectorOfVectorOfPoint();
                            countingLane = new Rectangle(tbCountingLanePosX.Value, tbCountingLanePosY.Value, tbCountingLaneSizeWidth.Value, tbCountingLaneSizeHeight.Value);
                            if (cbxCounting.Checked)
                            {
                                CvInvoke.FindContours(imgBS, contours, null, RetrType.External, ChainApproxMethod.ChainApproxSimple);
                                for (int i = 0; i < contours.Size; i++)
                                {
                                    double area = CvInvoke.ContourArea(contours[i]);
                                    if (area > tbMinSize.Value)
                                    {
                                        Rectangle rect        = CvInvoke.BoundingRectangle(contours[i]);
                                        Point     middlePoint = CenterOfRect(rect);
                                        CvInvoke.Circle(imgResult, middlePoint, 1, SCALAR_BLUE, 2);
                                        if (countingLane.Contains(middlePoint))
                                        {
                                            CvInvoke.Rectangle(imgResult, rect, SCALAR_RED, 2);
                                            LineSegment2D line = new LineSegment2D();
                                            if (cbxIncoming.Checked)
                                            {
                                                line.P1 = new Point(countingLane.Left, countingLane.Bottom);
                                                line.P2 = new Point(countingLane.Right, countingLane.Bottom);
                                            }
                                            else
                                            {
                                                line.P1 = new Point(countingLane.Left, countingLane.Top);
                                                line.P2 = new Point(countingLane.Right, countingLane.Top);
                                            }
                                            CvInvoke.Line(imgResult, line.P1, line.P2, SCALAR_WHITE, 1);
                                            if (pointCrossLine(middlePoint, line))
                                            {
                                                carcount++;
                                                Console.WriteLine($"Car count: {carcount}");
                                                CvInvoke.Line(imgResult, line.P1, line.P2, SCALAR_YELLOW, 3);
                                            }
                                        }
                                    }
                                }
                            }
                            else
                            {
                                CvInvoke.Rectangle(imgResult, countingLane, SCALAR_RED, 1);
                                if (cbxIncoming.Checked)
                                {
                                    CvInvoke.Line(imgResult, new Point(countingLane.Left, countingLane.Bottom), new Point(countingLane.Right, countingLane.Bottom), SCALAR_YELLOW, 3);
                                }
                                else
                                {
                                    CvInvoke.Line(imgResult, new Point(countingLane.Left, countingLane.Top), new Point(countingLane.Right, countingLane.Top), SCALAR_YELLOW, 3);
                                }
                            }

                            ibResult.Image   = imgResult;
                            ibOriginal.Image = imgOriginal;
                            ibBs.Image       = imgBS;
                        }
                    }
                    else
                    {
                        videoCapture.SetCaptureProperty(CapProp.PosFrames, 0.0);
                    }
                }
            }
        }
Example #22
        static void montion_detect_1()
        {
            VideoCapture vc = new VideoCapture(0);

            if (vc.IsOpened)
            {
                double db = vc.GetCaptureProperty(CapProp.Mode);
                //bool b = vc.SetCaptureProperty(CapProp.Mode, 1);
                bool b = vc.SetCaptureProperty(CapProp.Mode, 0);
                b = vc.SetCaptureProperty(CapProp.FrameHeight, 1944);
                b = vc.SetCaptureProperty(CapProp.FrameWidth, 2592);
                if (vc.Grab())
                {
                    Mat m = new Mat();
                    if (vc.Retrieve(m))
                    {
                        m.Save("temp_1.jpg");
                    }
                }
                //VideoWriter v1 = new VideoWriter("test_1.mp4", (int)vc.GetCaptureProperty(CapProp.Fps), new Size((int)vc.GetCaptureProperty(CapProp.FrameWidth), (int)vc.GetCaptureProperty(CapProp.FrameHeight)), true);
                //VideoWriter v2 = new VideoWriter("test_2.mp4", (int)vc.GetCaptureProperty(CapProp.Fps), new Size((int)vc.GetCaptureProperty(CapProp.FrameWidth), (int)vc.GetCaptureProperty(CapProp.FrameHeight)), true);
                BackgroundSubtractorMOG2 bgs = new BackgroundSubtractorMOG2();
                bool monition             = false;
                Mat  k                    = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(3, 3), new Point(1, 1));
                Image <Gray, Byte> bg_img = null;
                int index                 = 1;
                Console.WriteLine("Camera is ready. Press Esc to exit.");
                bool device_in_place = false;
                while (true)
                {
                    Mat cm = new Mat();
                    vc.Read(cm);
                    Mat mask = new Mat();
                    bgs.Apply(cm, mask);
                    //v1.Write(cm);
                    //v2.Write(mask);
                    //img = img.MorphologyEx(MorphOp.Erode, k, new Point(-1, -1), 3, BorderType.Default, new MCvScalar(0));
                    //CvInvoke.MorphologyEx(mask, mask, MorphOp.Erode, k, new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0));
                    MCvScalar mean   = new MCvScalar();
                    MCvScalar stdDev = new MCvScalar();
                    CvInvoke.MeanStdDev(mask, ref mean, ref stdDev);
                    if (mean.V0 > 17)
                    {
                        if (!monition)
                        {
                            Program.logIt("motion detected!");
                            Console.WriteLine("Detected montion.");
                            monition = true;
                        }
                    }
                    else
                    {
                        if (monition)
                        {
                            Program.logIt("motion stopped!");
                            Console.WriteLine("Montion stopped.");
                            monition = false;
#if true
                            if (bg_img == null)
                            {
                                bg_img = cm.ToImage <Gray, Byte>().Rotate(-90, new Gray(0), false);
                                //bg_img.Save("temp_bg.jpg");
                            }
                            else
                            {
                                device_in_place = handle_motion_V2(cm.ToImage <Bgr, Byte>().Rotate(-90, new Bgr(0, 0, 0), false), bg_img, index++);
                                if (!device_in_place)
                                {
                                    bg_img = cm.ToImage <Gray, Byte>().Rotate(-90, new Gray(0), false);
                                }
                                //Rectangle r = new Rectangle(196, 665, 269, 628);
                                //// check needed.
                                //{
                                //    Image<Gray, Byte> img = cm.ToImage<Gray, Byte>().Rotate(-90, new Gray(0), false);
                                //    img.Save($"temp_{index}.jpg");
                                //    img = img.AbsDiff(bg_img);
                                //    if (img.GetAverage().MCvScalar.V0 > 10)
                                //    {

                                //    }
                                //    img.Save($"temp_{index}_diff.jpg");
                                //}
                                //{
                                //    Image<Bgr, Byte> img = cm.ToImage<Bgr, Byte>().Rotate(-90, new Bgr(0, 0, 0), false);
                                //    Image<Bgr, Byte> img1 = img.Copy(r);
                                //    img1.Save($"temp_{index}_1.jpg");
                                //}
                                //index++;
                            }
#else
                            if (!device_in_place)
                            {
                                bg_img = cm.ToImage <Gray, Byte>().Rotate(-90, new Gray(0), false);
                            }
                            device_in_place = handle_motion(cm.ToImage <Bgr, Byte>().Rotate(-90, new Bgr(0, 0, 0), false), bg_img, index++);
#endif
                        }
                    }
                    GC.Collect();
                    if (System.Console.KeyAvailable)
                    {
                        ConsoleKeyInfo ki = Console.ReadKey();
                        if (ki.Key == ConsoleKey.Escape)
                        {
                            break;
                        }
                    }
                }
            }
        }
Example #23
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = _capture.QueryFrame();

            if (frame == null)
            {
                if (mEtapa == 1)
                {
                    preencherParametrosMapeamento();
                }
                _capture.Dispose();
                return;
            }
            mContadorDeFrames++;
            if (mEtapa == 0)
            {
                verificarEatualizarParametrosCalibracao();
            }
            _capture.Retrieve(frame, 0);

            Image<Bgr, byte> smoothedFrame = new Image<Bgr, byte>(frame.Size);

            CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(parametros.AlphaMediaMovel, parametros.AlphaMediaMovel), parametros.AlphaMediaMovel); // filter out noise

            // use the BG/FG detector to find the foreground mask
            Mat foregroundMask = new Mat();

            mDetector.Apply(smoothedFrame, foregroundMask);
            //CvInvoke.AbsDiff(smoothedFrame, foregroundMask.ToImage<Bgr, byte>(), vPlanoFundo);

            mblobs = new CvBlobs();
            mBlobDetector.Detect(foregroundMask.ToImage<Gray, byte>(), mblobs);
            mblobs.FilterByArea(100, int.MaxValue);
            if (mEtapa == 0)
            {
                mJanelaCalibracao.Imagem.Image = frame;
                Mat vCopiaMenorBinaria = new Mat();
                CvInvoke.Resize(foregroundMask, vCopiaMenorBinaria, new Size(0, 0), 0.7, 0.7, Inter.Area);
                mJanelaCalibracao.PlanoDeFundo.Image = smoothedFrame;
                mJanelaCalibracao.Objetos.Image      = vCopiaMenorBinaria;
            }
            if (mEtapa == 1)
            {
                mJanelaAreaRestrita.Imagem.Image = frame;
            }
            if (mEtapa == 2)
            {
                mJanelaMonitoramento.ImagemMonitorada.Image = frame;
            }
            mImagemColorida = frame;
            if (mEtapa == 0)
            {
                desenharParametroTamanhoPessoa();
                desenharRetanguloPessoa();
            }
            if (mEtapa == 1)
            {
                desenharEMapear();
            }
            if (mEtapa == 2)
            {
                atualizarParametros(parametros);
                desenharEprocessar();
            }
        }
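
For context, ProcessFrame in Example #23 has the signature of an Emgu CV ImageGrabbed handler. The usual wiring, assuming a VideoCapture field named _capture as in the snippet and a hypothetical source path, is:

            // assumed event wiring for Example #23
            _capture = new VideoCapture("video.mp4");
            _capture.ImageGrabbed += ProcessFrame;
            _capture.Start(); // grabs frames on a background thread and raises ImageGrabbed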