Example #1
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();      // matrix to hold the captured frame
            capture.Retrieve(frame, 0); // retrieve the frame into the matrix
            Image<Bgr, byte> image = frame.ToImage<Bgr, byte>();
            FaceNo = 0;
            if (!frame.IsEmpty)
            {
                Image<Gray, byte> grayFrame = frame.ToImage<Gray, byte>(); // grayscale copy for detection
                faces = cascade.DetectMultiScale(grayFrame, 1.1, 2, new Size(30, 30));

                Bitmap BitmapInput = grayFrame.ToBitmap();
                Bitmap ExtractedFace;
                Graphics FaceCanvas;
                //countTable.Text = faces.Count().ToString();
                if (faces.Length > 0)
                {
                    foreach (var face in faces)
                    {
                        image.Draw(face, new Bgr(Color.Blue), 1); // draw rectangles in the picture
                        ExtractedFace = new Bitmap(face.Width, face.Height);
                        FaceCanvas = Graphics.FromImage(ExtractedFace);
                        FaceCanvas.DrawImage(BitmapInput, 0, 0, face, GraphicsUnit.Pixel);
                        ExtFaces.Add(ExtractedFace);
                        FaceNo++;
                    }
                }
                imageBox1.Image = image; // display the image in the imageBox
            }
        }
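A minimal sketch of the wiring this handler assumes, shown for context. It assumes a recent Emgu CV (3.x) where the capture class is VideoCapture; the field names capture and cascade match the snippet, while the cascade XML path is a hypothetical placeholder:

        private VideoCapture capture;
        private CascadeClassifier cascade;

        private void StartCamera()
        {
            capture = new VideoCapture(0); // default webcam
            cascade = new CascadeClassifier("haarcascade_frontalface_default.xml"); // hypothetical path
            capture.ImageGrabbed += ProcessFrame; // raised once per grabbed frame
            capture.Start();
        }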
Example #2
      void ProcessFrame(object sender, EventArgs e)
      {
         Mat frame = _cameraCapture.QueryFrame();
         Mat smoothedFrame = new Mat();
         CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1); //filter out noise
         //frame._SmoothGaussian(3); 

         #region use the BG/FG detector to find the foreground mask
         Mat foregroundMask = new Mat();
         _fgDetector.Apply(smoothedFrame, foregroundMask);
         #endregion

         CvBlobs blobs = new CvBlobs();
         _blobDetector.Detect(foregroundMask.ToImage<Gray, byte>(), blobs);
         blobs.FilterByArea(100, int.MaxValue);

         float scale = (frame.Width + frame.Height) / 2.0f; // average frame dimension
         _tracker.Update(blobs, 0.01 * scale, 5, 5);
        
         foreach (var pair in _tracker)
         {
            CvTrack b = pair.Value;
            CvInvoke.Rectangle(frame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 2);
            CvInvoke.PutText(frame,  b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
         }

         imageBox1.Image = frame;
         imageBox2.Image = foregroundMask;
      }
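The snippet assumes a capture source, a background subtractor, a blob detector, and a blob tracker as fields. A minimal setup sketch, assuming Emgu CV 3.x (CvBlobDetector and CvTracks live in Emgu.CV.Cvb; the constructor defaults are illustrative):

      private VideoCapture _cameraCapture;
      private BackgroundSubtractorMOG2 _fgDetector;
      private CvBlobDetector _blobDetector;
      private CvTracks _tracker;

      void Init()
      {
         _cameraCapture = new VideoCapture(0);
         _fgDetector = new BackgroundSubtractorMOG2(); // default history/threshold
         _blobDetector = new CvBlobDetector();
         _tracker = new CvTracks();
         _cameraCapture.ImageGrabbed += ProcessFrame;
         _cameraCapture.Start();
      }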
Example #3
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            if (_transmitTask == null || _transmitTask.IsCompleted)
            {
                using (var matCaptured = new Mat())
                {
                    CameraCapture.Retrieve(matCaptured);
                    var bgrImage = matCaptured.ToImage<Bgr, byte>();
                    WriteText(bgrImage, 30, DateTime.Now.ToString("HH:mm:ss tt"));
                    imageBoxCaptured.Image = bgrImage;

                    IImageTransmitter transmitter = null;
                    if (radBsonImage.Checked)
                    {
                        transmitter = _imageTransmitter;
                    }
                    else if (radBsonJpeg.Checked)
                    {
                        transmitter = _jpegTransmitter;
                    }

                    if (transmitter != null)
                    {
                        _transmitTask = transmitter.Transmit(bgrImage);
                    }
                }
            }
        }
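The null-or-completed check at the top is the key pattern: a new transfer starts only when the previous one has finished, and frames arriving in between are simply dropped. The same guard in isolation (a sketch; SendAsync is a hypothetical stand-in for the transmitter call):

        private Task _transmitTask;

        private void OnFrame(byte[] payload)
        {
            if (_transmitTask == null || _transmitTask.IsCompleted)
            {
                // fire and forget; later frames are skipped until this completes
                _transmitTask = SendAsync(payload);
            }
        }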
Example #4
        public void guardarArchivo(Emgu.CV.Mat img, String name)
        {
            Emgu.CV.Image<Emgu.CV.Structure.Bgr, Byte> memoryImageOut = img.ToImage<Emgu.CV.Structure.Bgr, Byte>();

            Bitmap memoryImageT = memoryImageOut.ToBitmap();

            memoryImageT.Save(ruta + "/files/" + name + ".bmp");
        }
Example #5
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            using (var matCaptured = new Mat())
            {
                CameraCapture.Retrieve(matCaptured);

                var grayImage = matCaptured.ToImage<Gray, byte>();

                #region circle detection
                var watch = Stopwatch.StartNew();
                double cannyThreshold = 180.0;
                double circleAccumulatorThreshold = 120;
                CircleF[] circles = CvInvoke.HoughCircles(
                    grayImage
                    , HoughType.Gradient
                    , 2.0
                    , 40.0
                    , cannyThreshold
                    , circleAccumulatorThreshold
                    , 5);

                watch.Stop();
                NotifyStatus("{0} Hough circles in {1}; ", circles.Length, watch.Elapsed.ToHumanReadable());
                #endregion

                #region draw circles
                var circleImage = matCaptured.ToImage<Bgr, byte>();
                foreach (CircleF circle in circles)
                {
                    circleImage.Draw(circle, new Bgr(Color.Green), 10);
                }
                #endregion

                imageBoxCaptured.Image = circleImage;
            }
        }
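For reference, the same detection outside a capture loop. A standalone sketch assuming Emgu CV 3.x ("circles.jpg" is a hypothetical path):

        var gray = new Image<Gray, byte>("circles.jpg");
        CircleF[] found = CvInvoke.HoughCircles(
            gray,
            HoughType.Gradient,
            2.0,    // dp: accumulator at half the input resolution
            40.0,   // minimum distance between circle centres, in pixels
            180.0,  // Canny high threshold
            120.0,  // accumulator threshold: higher means fewer, stronger circles
            5);     // minimum radius, in pixels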
Example #6
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            using (var frame = new Mat())
            {
                CameraCapture.Retrieve(frame);
                var input = new MotionDetectorInput();

                var inputImage = frame.ToImage<Bgr, byte>();
                input.Captured = frame;
                input.Settings = _currentSettings;

                var output = _motionDetector.Process(input);

                var bgrRed = new Bgr(Color.Red);
                var bgrBlue = new Bgr(Color.Blue);

                foreach (var motionRegion in output.MotionSections)
                {
                    var text = string.Format("A={0}, M={1}", motionRegion.Area, motionRegion.PixelsInMotionCount);
                    inputImage.Draw(motionRegion.Region, bgrRed);
                    if (chkRectangleStats.Checked)
                    {
                        inputImage.Draw(text, motionRegion.Region.Location, Emgu.CV.CvEnum.FontFace.HersheyComplexSmall, .8, bgrRed);
                    }
                    DrawMotion(output.MotionImage, motionRegion.Region, motionRegion.Angle, bgrRed);
                }

                DrawMotion(output.MotionImage, new Rectangle(Point.Empty, output.MotionImage.Size), output.OverallAngle, new Bgr(Color.Green));

                if (output.BiggestMotion != null)
                {
                    var motion = output.BiggestMotion;
                    inputImage.Draw(motion.Region, bgrBlue);
                }

                imageBoxCaptured.Image = inputImage;
                imageBoxMasked.Image = output.ForegroundImage;
                imageBoxMotion.Image = output.MotionImage;

                NotifyStatus(
                    "Motion detection took {0}. {1} motion sections, {2} overall motion pixels"
                    , output.Elapsed.ToHumanReadable()
                    , output.MotionSections.Count
                    , output.OverallMotionPixelCount);
            }
        }
Example #7
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            if (_detector == null)
            {
                return;
            }
            using (var matCaptured = new Mat())
            {
                CameraCapture.Retrieve(matCaptured);
                var input = new CascadeDetectorInput
                {
                    Captured = matCaptured,
                    ClassifierParams = _classiferParams
                };
                var result = _detector.Process(input);
                var image = matCaptured.ToImage<Bgr, byte>();

                foreach (Rectangle item in result.Objects)
                {
                    image.Draw(item, new Bgr(Color.Blue), 2);
                }

                imageBoxCaptured.Image = image;
            }
        }
Example #8
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = new Mat();
            Image<Bgr, Byte> frame1;
            frameNum = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);
            _capture.Retrieve(frame, 0);
            frame1 = frame.ToImage<Bgr, Byte>();
            frame1 = frame1.Resize(.5, Emgu.CV.CvEnum.Inter.Cubic);
            frame = frame1.Mat;
            //MessageBox.Show(_capture.Height + " " + _capture.Width + "\n" + frame1.Height + " " + frame1.Width);
            if (!frame.IsEmpty)
            {
                using (UMat ugray = new UMat())
                {
                    CvInvoke.CvtColor(frame, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                    CvInvoke.EqualizeHist(ugray, ugray);
                    Rectangle[] breastDetected = cascadeBreast.DetectMultiScale(
                       ugray,
                       1.1,
                       30,
                       new Size(20, 20));
                    /*Rectangle[] pussyDetected = cascadePuss.DetectMultiScale(
                       ugray,
                       1.1,
                       30,
                       new Size(20, 20));
                    Rectangle[] dickDetected = cascadePen.DetectMultiScale(
                       ugray,
                       1.1,
                       35,
                       new Size(20, 20));*/
                    progressBar1.Invoke(new MethodInvoker(delegate { progressBar1.Increment(1); label1.Text = frameNum.ToString(); }));
                }
            }
        }
        // detect() runs on its own thread
        public void detect()
        {
            while (!Stop)
            {
                //if the frame image is updated
                if (updated)
                {
                    //not safe to set the frame image
                    Finished = false;
                    using (Mat bgImage = _image.Clone())
                    using (Mat mask = new Mat())
                    {
                        Finished = true;
                        updated = false;
                        //get mask
                        _foregroundDetector.Apply(bgImage, mask, -1);
                        //set blob image
                        _bImage.setBlobImage(mask.ToImage<Gray, Byte>().Clone());

                    }
                }
            }
        }
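detect() spins on the updated flag, so another thread has to publish frames into _image. A hypothetical producer side of that handshake (the field names are taken from the snippet; the callback itself is illustrative):

        private void OnNewFrame(Mat frame)
        {
            if (Finished)              // don't swap the frame while detect() is cloning it
            {
                _image = frame.Clone();
                updated = true;        // signal detect() that a fresh frame is ready
            }
        }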
Example #10
        public static BitArray testLine<TColor, TDepth>(Emgu.CV.Mat imageLine, TDepth[] channelThresh, int row = 0)
            where TColor : struct, IColor
            where TDepth : IComparable<TDepth>, new()
        {
            if (imageLine.NumberOfChannels < channelThresh.Length)
            {
                throw new IndexOutOfRangeException("Array exceeds number of channels in image.");
            }

            Image<TColor, TDepth> tempImage = imageLine.ToImage<TColor, TDepth>();
            BitArray outArray = new BitArray(imageLine.Width);

            for (int pixel = 0; pixel < imageLine.Width; pixel++)
            {
                bool temp = true;
                for (int channel = 0; channel < channelThresh.Length; channel++)
                {
                    temp &= channelThresh[channel].CompareTo(tempImage.Data[row, pixel, channel]) <= 0;
                }
                outArray[pixel] = temp;
            }
            return outArray;
        }
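Hypothetical usage: threshold one row of a BGR image against per-channel minimums (the file name and threshold values are illustrative):

        Image<Bgr, byte> strip = new Image<Bgr, byte>("strip.png");
        BitArray hits = testLine<Bgr, byte>(strip.Mat, new byte[] { 100, 100, 100 });
        // hits[x] is true where every tested channel at column x is >= its threshold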
Example #11
        public static ProcessResult Process(Mat image, out SessionPoint[] points)
        {
            IntPtr outPtr;
            int outSize;
            int result;

            using (var i = image.ToImage<Bgr, byte>()) {
                result = Process(i.Ptr, out outPtr, out outSize);
            }
            points = new SessionPoint[outSize];
            for (var i = 0; i < outSize; i++) {
                points[i] = (SessionPoint)Marshal.PtrToStructure(new IntPtr(outPtr.ToInt64() + (i * Size)), Type);
            }
            switch (result) {
                case 1:
                    return ProcessResult.EmptyImage;
                case 2:
                    return ProcessResult.NotDetected;
                case 3:
                    return ProcessResult.Success;
                default:
                    return ProcessResult.Error;
            }
        }
        private void ProcessFrame(object sender, EventArgs e)
        {
            if (BeforeProcessFrame != null)
                BeforeProcessFrame();

            Mat curImage = new Mat();
            var imageGrabbed = capture.Retrieve(curImage);
            {
                if (!imageGrabbed)
                {
                    capture.Stop();
                    return;
                }
                Interlocked.Increment(ref counter);

                if (counter % FrameInterval != 0)
                {
                    if (counter % FrameInterval == 1)
                    {
                        prevImage = new Image<Bgr, byte>(curImage.Bitmap);
                    }
                    return;
                }

                //Process starts here

                var prevGray = prevImage.Convert<Gray, byte>();
                var curGray = curImage.ToImage<Gray, byte>();

                var denseResult = new DenseOpticalFlowAlgorithm(prevGray, curGray, Parameters)
                {
                    VectorNoiseThreshold = VectorGlobalThreshold,
                    WindowSideLength = WindowSideLength,
                    ComputeFlowImage = ComputeFlowImage
                }.Compute();

                if (OpticalFlowVectorsProcessed != null)
                {
                    OpticalFlowVectorsProcessed(denseResult.FlowVectorImage);
                }

                OpticalFlowPoint[] pointsToCluster = denseResult.FlowLineArray
                    .Where(x => x.OverThreshold)
                    .Select(x => new OpticalFlowPoint(
                        x.Line.P2,
                        x.Line.GetExteriorAngleDegree(DenseOpticalFlowAlgorithm.UnitVectorOfX)))
                    .ToArray();

                var clusters =
                    new DbscanAlgorithm<OpticalFlowPoint>(
                        (p, x) => p - x,
                        Math.Abs(OrientationAngle) <= 0 || Math.Abs(OrientationAngle) >= 360
                            ? (Func<OpticalFlowPoint, OpticalFlowPoint, bool>)null
                            : (point, x) => Math.Abs(point.ExteriorAngleDegree - x.ExteriorAngleDegree)  < OrientationAngle)
                        .ComputeClusterDbscan(pointsToCluster, Epsilon, MinPts);

                #region Draw vectors per frame

                var processedImage = new Image<Bgr, byte>(curImage.Bitmap);
                foreach (var line in denseResult.FlowLineArray.Where(x => x.OverThreshold).Select(x => x.Line).ToArray())
                {
                    processedImage.Draw(new CircleF(line.P2, 2), new Bgr(Color.Yellow));
                }

                #endregion

                //draw cluster rectangles
                foreach (var points in clusters)
                {
                    var boundingRect = PointCollection.BoundingRectangle(points.Select(x => x.EndPoint).ToArray());
                    processedImage.Draw(boundingRect, new Bgr(Color.Red), 2);
                }

                //draw convex hull
                foreach (var points in clusters)
                {
                    PointF[] hull = CvInvoke.ConvexHull(points.Select(x => x.EndPoint).ToArray());
                    processedImage.DrawPolyline(Array.ConvertAll(hull, Point.Round), true, new Bgr(Color.Blue));
                }

                ImageProcessed(processedImage);
            }
        }
Example #13
        // will be simplified
        public void ProcessFrame(object sender, EventArgs arg)
        {
            //get frame from the Camera

            Mat frame = new Mat();
            Mat grayFrame = new Mat();

            double thresh_med;
            double thresh_e2;

            System.Drawing.Image OrgImage;
            System.Drawing.Image OrgImage1;

            _capture.Retrieve(frame);

            CvInvoke.CvtColor(frame, grayFrame, ColorConversion.Bgr2Gray);
            Image<Rgb, Byte> tothermo = grayFrame.ToImage<Rgb, Byte>(); // original
            Image<Gray, Byte> tothermo1 = grayFrame.ToImage<Gray, Byte>();

            OrgImage = tothermo.ToBitmap();
            OrgImage1 = tothermo.ToBitmap();

            pictureBox4.Image = OrgImage1;

            _imagedata.MakeFalse((Bitmap)OrgImage);
            pictureBox1.Image = OrgImage;

            pictureBox1.Refresh();
            pictureBox4.Refresh();

            double[] minVal;
            double[] maxVal;
            System.Drawing.Point[] minLoc;
            System.Drawing.Point[] maxLoc;
            grayFrame.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);

            _imagedata.sizex = OrgImage.Width;
            _imagedata.sizey = OrgImage.Height;

            if (hand_mode)
            {
                _imagedata.centerx = mouse_x;
                _imagedata.centery = mouse_y;
            }
            else
            {
                _imagedata.centerx = maxLoc[0].X;
                _imagedata.centery = maxLoc[0].Y;
            }

            _imagedata.GraphFill(tothermo1);

            // calculate thresholds
            if (hand_mode)
            {
                thresh_med = grayFrame.Bitmap.GetPixel(_imagedata.centerx, _imagedata.centery).R / 2;
                thresh_e2 = grayFrame.Bitmap.GetPixel(_imagedata.centerx, _imagedata.centery).R / Math.Exp(2);
            }
            else
            {
                thresh_med = maxVal[0] / 2;
                thresh_e2 = maxVal[0] / Math.Exp(2);
            }

            _beamparameters.BeamSizeDetect(thresh_med, thresh_e2, _imagedata);
        }
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Mat frame = _capture.QueryFrame();
            if (frame == null)
            {
                if (mEtapa == 1)
                    preencherParametrosMapeamento();
                _capture.Dispose();
                return;
            }
            mContadorDeFrames++;
            if (mEtapa == 0)
            {
                verificarEatualizarParametrosCalibracao();
            }
            _capture.Retrieve(frame, 0);

            Image<Bgr, Byte> smoothedFrame = new Image<Bgr, byte>(frame.Size);
            CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(parametros.AlphaMediaMovel, parametros.AlphaMediaMovel), parametros.AlphaMediaMovel); // filter out noise

            // use the BG/FG detector to find the foreground mask
            Mat foregroundMask = new Mat();
            mDetector.Apply(smoothedFrame, foregroundMask);
            //CvInvoke.AbsDiff(smoothedFrame, foregroundMask.ToImage<Bgr, byte>(), vPlanoFundo);

            mblobs = new CvBlobs();
            mBlobDetector.Detect(foregroundMask.ToImage<Gray, byte>(), mblobs);
            mblobs.FilterByArea(100, int.MaxValue);
            if (mEtapa == 0)
            {
                mJanelaCalibracao.Imagem.Image = frame;
                Mat vCopiaMenorBinaria = new Mat();
                CvInvoke.Resize(foregroundMask, vCopiaMenorBinaria, new Size(0, 0), 0.7, 0.7, Inter.Area);
                mJanelaCalibracao.PlanoDeFundo.Image = smoothedFrame;
                mJanelaCalibracao.Objetos.Image = vCopiaMenorBinaria;
            }
            if (mEtapa == 1)
            {
                mJanelaAreaRestrita.Imagem.Image = frame;
            }
            if (mEtapa == 2)
            {
                mJanelaMonitoramento.ImagemMonitorada.Image = frame;
            }
            mImagemColorida = frame;
            if (mEtapa == 0)
            {
                desenharParametroTamanhoPessoa();
                desenharRetanguloPessoa();
            }
            if (mEtapa == 1)
            {
                desenharEMapear();
            }
            if (mEtapa == 2)
            {
                atualizarParametros(parametros);
                desenharEprocessar();
            }
        }
Example #15
        /// <summary>
        /// Crops the image to the bounding box of the provided points. Does not store or modify any Offset properties.
        /// </summary>
        /// <param name="image">Image to crop</param>
        /// <param name="points">Points whose bounding box defines the crop</param>
        /// <param name="xOffset">Left edge of the crop region</param>
        /// <param name="yOffset">Top edge of the crop region</param>
        /// <returns>Cropped image</returns>
        public Image<Gray, byte> Crop(Image<Gray, byte> image, List<PointF> points, out int xOffset, out int yOffset)
        {
            int xMax = 0;
            xOffset = image.Width;
            int yMax = 0;
            yOffset = image.Height;

            foreach (PointF point in points)
            {
                if (point.X > xMax)
                    xMax = Convert.ToInt32(point.X);
                if (point.X < xOffset)
                    xOffset = Convert.ToInt32(point.X);
                if (point.Y > yMax)
                    yMax = Convert.ToInt32(point.Y);
                if (point.Y < yOffset)
                    yOffset = Convert.ToInt32(point.Y);
            }

            int xSize = Convert.ToInt32(xMax - xOffset);
            int ySize = Convert.ToInt32(yMax - yOffset);


            Rectangle frame = new Rectangle(new System.Drawing.Point(xOffset, yOffset), new System.Drawing.Size(xSize, ySize));
            Mat cropped = null;
            try
            {
                cropped = new Mat(image.Clone().Mat, frame);
            }
            catch (Exception e)
            {
                Log.Print(String.Format("Error occurred during image crop. Reason: {0}", e.Message), eCategory.Error, LogTag.IMAGE);
                return null; // cropping failed; don't dereference the null Mat below
            }
            return cropped.ToImage<Gray, byte>();
        }
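Hypothetical usage (sourceImage stands in for an existing Image<Gray, byte>; coordinates are illustrative): the crop spans the min/max of the points, and the out parameters report the region's top-left corner:

        var points = new List<PointF> { new PointF(10, 20), new PointF(90, 40), new PointF(50, 120) };
        int xOff, yOff;
        Image<Gray, byte> region = Crop(sourceImage, points, out xOff, out yOff);
        // region covers x: 10..90, y: 20..120, and (xOff, yOff) == (10, 20)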
Example #16
        void ShowFromCam(object sender, EventArgs e)
        {

            Mat frameMat = new Mat();

            try
            {
                if (!cap.Retrieve(frameMat, 0))
                    return;

                Image<Bgr, byte> returnimage = frameMat.ToImage<Bgr, byte>();
                pictureBox1.Image = returnimage.ToBitmap();
            }
            catch
            {
                // ignore frames that fail to decode
            }
        }
Example #17
        private void ProcessFrame(object sender, EventArgs arg)
        {
            try
            {
                if (capturecam != null)
                {
                    Mat frame = new Mat();
                    capturecam.Retrieve(frame, 0);

                    if (!frame.IsEmpty)
                    {
                        _captureInProgress = true;

                        Mat cannyFrame = new Mat();
                        CvInvoke.Canny(frame, cannyFrame, 100, 60);

                        Image<Bgr, Byte> frameCanny = cannyFrame.ToImage<Bgr, Byte>();

                        Image<Bgr, Byte> frameBgr = frame.ToImage<Bgr, Byte>();
                        Image<Gray, Byte> frameGray = frame.ToImage<Gray, Byte>();

                        //CvInvoke.EqualizeHist(frameGray, frameGray); // normalizes brightness and increases contrast of the image

                        Rectangle[] facesDetected = _cascadeClassifierFace.DetectMultiScale(frameGray, 1.1, 10, new Size(20, 20));  //Size.Empty); //the actual face detection happens here

                        //faces.AddRange(facesDetected);

                        foreach (Rectangle f in facesDetected)
                        {
                            //frameBgr.Draw(f, new Bgr(Color.OrangeRed), 2); //the detected face(s) is highlighted here using a box that is drawn around it/them
                            CvInvoke.Rectangle(frameBgr, f, new Bgr(Color.OrangeRed).MCvScalar, 2);

                            //Console.WriteLine("Rect : " + f);

                            _centerFace.X = (int)(f.X + f.Width * 0.5);
                            _centerFace.Y = (int)(f.Y + f.Height * 0.5);

                            _trajFace.Add(_centerFace);

                            textBoxUpdate(textBoxPosX, "X : " + _centerFace.X.ToString());
                            textBoxUpdate(textBoxPosY, "Y : " + _centerFace.Y.ToString());

                            CvInvoke.Circle(frameBgr, _centerFace, 1, new Bgr(Color.OrangeRed).MCvScalar, 5, Emgu.CV.CvEnum.LineType.AntiAlias, 0);

                            //centerRect.
                            //Get the region of interest on the faces
                            using (UMat faceRegion = new UMat(frameGray.ToUMat(), f))
                            {
                                Rectangle[] eyesDetected = _cascadeClassifierEye.DetectMultiScale(faceRegion, 1.1, 10, new Size(20, 20));

                                foreach (Rectangle e in eyesDetected)
                                {
                                    Rectangle eyeRect = e;
                                    eyeRect.Offset(f.X, f.Y);
                                    //eyes.Add(eyeRect);
                                    //frameBgr.Draw(eyeRect, new Bgr(Color.Red), 2); //the eyes face(s) is highlighted here using a box that is drawn around it/them
                                    //CvInvoke.Rectangle(frameBgr, eyeRect, new Bgr(Color.Blue).MCvScalar, 2);

                                    _centerEye.X = (int)(eyeRect.X + eyeRect.Width * 0.5);
                                    _centerEye.Y = (int)(eyeRect.Y + eyeRect.Height * 0.5);

                                    CvInvoke.Circle(frameBgr, _centerEye, 1, new Bgr(Color.Blue).MCvScalar, 5, Emgu.CV.CvEnum.LineType.AntiAlias, 0);

                                    LineSegment2D _lindeEye = new LineSegment2D(_centerEye, _centerEyePrev);

                                    if ((_firstLine) && (_lindeEye.P1 != _lindeEye.P2)) CvInvoke.Line(frameBgr, _centerEye, _centerEyePrev, new Bgr(Color.Blue).MCvScalar, 1, Emgu.CV.CvEnum.LineType.AntiAlias, 0);

                                    _centerEyePrev = _centerEye;
                                    _firstLine = true;

                                    if ((_lindeEye.P1 != _lindeEye.P2) && (_lindeEye.P1.X != 0) && (_lindeEye.P2.X != 0) && (_lindeEye.P1.Y != 0) && (_lindeEye.P2.Y != 0))
                                    {
                                        //double angle = (Math.Cos((_lindeRef.P2.X - _lindeRef.P1.X) / ((_lindeEye.P2.X - _lindeEye.P1.X)))*180) / Math.PI;
                                        double angle = (Math.Atan2(Math.Abs(_lindeEye.P1.Y - _lindeEye.P2.Y), (_lindeEye.P1.X - _lindeEye.P2.X)) * 180 / Math.PI);

                                        //Console.WriteLine("Angle : " + angle);

                                        if (angle != 1)
                                        {

                                            //angle -= 57.0;
                                            //Console.WriteLine("Angle : " + angle);

                                            //if ((Math.Abs(angle) > 15) && (Math.Abs(angle) < 50))
                                            if (angle < 90)
                                            {

                                                textBoxUpdate(textBoxAngle, Math.Round(angle).ToString() + "° ");
                                                //frameBgr = frameBgr.Rotate(angle, new Bgr(Color.Gray), false);
                                            }
                                            else if (angle > 90)
                                            {

                                                textBoxUpdate(textBoxAngle, (180 - Math.Round(angle)).ToString() + "° ");
                                                //frameBgr = frameBgr.Rotate((180-angle), new Bgr(Color.Gray), false);
                                            }
                                        }
                                    }

                                    /*using (Image<Bgr, Byte> drawing = new Image<Bgr, Byte>(imageBoxDraw.Width, imageBoxDraw.Height))
                                    {
                                        foreach (Point p in _trajFace)
                                        {
                                            CvInvoke.Circle(drawing, p, 1, new Bgr(Color.Red).MCvScalar, 1, Emgu.CV.CvEnum.LineType.AntiAlias, 0);
                                        }

                                        imageBoxDraw.Image = drawing;

                                    }*/
                                    //Graphics g = Graphics.FromHwnd(PictureBox.h);
                                    /*Graphics g = Graphics.FromHwnd(PictureBox.FromHandle);
                                    SolidBrush brush = new SolidBrush(Color.LimeGreen);
                                    Point dPoint = new Point(_centerEye.X, _centerEye.Y);
                                    dPoint.X = dPoint.X - 2;
                                    dPoint.Y = dPoint.Y - 2;
                                    Rectangle rect = new Rectangle(dPoint, new Size(4, 4));
                                    g.FillRectangle(brush, rect);
                                    g.Dispose();*/
                                }

                                processed.Image = faceRegion;
                            }
                        }

                        /*foreach (var eye in eyes)
                        {
                            frameBgr.Draw(eye, new Bgr(Color.Red), 2); //the eyes face(s) is highlighted here using a box that is drawn around it/them
                        }

                        foreach (var face in facesDetected)
                        {
                            frameBgr.Draw(face, new Bgr(Color.OrangeRed), 2); //the detected face(s) is highlighted here using a box that is drawn around it/them
                        }*/

                        original.Image = frameBgr;
                        //processed.Image = frameCanny;

                    }
                    else _captureInProgress = false;
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message.ToString());
            }
        }
Example #18
        public override void ImageGrabbedHandler(object sender, EventArgs e)
        {
            if (_calibrationInProgress)
            {
                return;
            }

            using (var matCaptured = new Mat())
            {
                CameraCapture.Retrieve(matCaptured);

                var statusAccumulation = new StringBuilder();
                var bgrImage = matCaptured.ToImage<Bgr, byte>();

                DrawReticle(bgrImage, _centre, Color.Red);

                if (UserReticle != null)
                {
                    DrawReticle(bgrImage, UserReticle.Value, Color.Green);
                }

                var input = new CameraProcessInput();
                input.SetCapturedImage = true;
                input.Captured = matCaptured;

                CameraPanTiltProcessOutput output = null;

                if (chkBoxColourTracking.Checked)
                {
                    var result = _colourTrackingController.Process(input);
                    output = result;
                    if (result.IsDetected)
                    {
                        DrawReticle(bgrImage, result.Target, Color.Yellow);
                    }

                    imageBoxFiltered.Image = result.ThresholdImage;
                    statusAccumulation.AppendFormat("{0} moment area", result.MomentArea);
                   // WriteText(bgrImage, _captureConfig.Resolution.Height - 10, "Colour Tracking");
                }

                if (chkBoxFaceTracker.Checked)
                {
                   // WriteText(bgrImage, _captureConfig.Resolution.Height - 50, "Face Tracking");
                    var result = _faceTrackingController.Process(input);
                    output = result;
                    if (result.IsDetected)
                    {
                        foreach (var face in result.Faces)
                        {
                            bgrImage.Draw(face.Region, new Bgr(Color.Yellow), 2);
                        }

                        DrawReticle(bgrImage, result.Target, Color.Yellow);
                    }
                    statusAccumulation.AppendFormat("{0} faces detected", result.Faces.Count);
                }

                if (chkBoxMotionTracking.Checked)
                {
                   // WriteText(bgrImage, _captureConfig.Resolution.Height - 75, "Motion Tracking");
                    var result = _motionTrackingController.Process(input);
                    output = result;
                    if (result.IsDetected)
                    {
                        foreach (var motionSection in result.MotionSections)
                        {
                            bgrImage.Draw(motionSection.Region, new Bgr(Color.Green));
                        }
                        if (result.TargetedMotion != null)
                        {
                            bgrImage.Draw(result.TargetedMotion.Region, new Bgr(Color.Red), 2);
                        }
                    }

                    statusAccumulation.AppendFormat("{0} motions", result.MotionSections.Count);
                    imageBoxFiltered.Image = result.ForegroundImage;
                }

                if (chkMultimode.Checked)
                {
                    var multimodeOutput = _multimodePanTiltController.Process(input);
                    output = multimodeOutput;

                    if (output.Target != Point.Empty)
                    {
                        DrawReticle(bgrImage, output.Target, Color.Yellow);
                    }
                }

                if (output != null)
                {
                    if (output.IsServoInMotion)
                    {
                        statusAccumulation.AppendFormat(", Waiting for servo");
                    }
                    else
                    {
                        statusAccumulation.AppendFormat(", tracking took {0}", output.Elapsed.ToHumanReadable());
                    }
                    NotifyStatus(statusAccumulation.ToString());
                }

                imageBoxCaptured.Image = bgrImage;
            }
        }
Example #19
        void LoadFromPath(/*string Path, string FileName*/List<cImageMetaInfo> ListImageMetaInfo)
        {
            if (ListImageMetaInfo == null) return;
            int ChannelStart = 0;

            for (int IdxName = 0; IdxName < ListImageMetaInfo.Count; IdxName++)
            {
                string CurrentName = ListImageMetaInfo[IdxName].FileName;

                if (CurrentName == "") continue;

                if (CurrentName.Contains("http://"))
                {
                    try
                    {
                        using (WebClient Client = new WebClient())
                        {
                            string[] ListForExt = CurrentName.Split('.');
                            Client.DownloadFile(CurrentName, "Tmp." + ListForExt[ListForExt.Length - 1]);
                            CurrentName = "Tmp." + ListForExt[ListForExt.Length - 1];
                        }
                    }
                    catch (Exception)
                    {
                        continue;
                    }

                }

                if (!File.Exists(CurrentName)) continue;

                string[] ListSplits = CurrentName.Split('.');
                string Extension = ListSplits[ListSplits.Length - 1].ToLower();

                byte[] rgbValues = null;

                int NumBytePerPixel = 0;
                int NumBitsPerPixel = 0;
                int NumChannels = 0;
                object ResMeta = null;             


                switch (Extension)
                {
                    #region Cellomics c01 - BioFormats
                    case "c01":
                        loci.formats.@in.CellomicsReader MyCellomicsReader = new loci.formats.@in.CellomicsReader();
                        MyCellomicsReader.setId(CurrentName);

                        int SerieCurr = -1;
                        for (int IdxSerie = 0; IdxSerie < MyCellomicsReader.getSeriesUsedFiles().Count(); IdxSerie++)
                        {

                            if (MyCellomicsReader.getSeriesUsedFiles()[IdxSerie] == CurrentName)
                            {
                                SerieCurr = IdxSerie;
                                break;
                            }
                        }

                        MyCellomicsReader.setSeries(SerieCurr);
                        rgbValues = MyCellomicsReader.openBytes(0);
                        //int ImC = MyCellomicsReader.getImageCount();

                        this.Width = MyCellomicsReader.getSizeX();

                        //                     ImageProcessorReader r = new ImageProcessorReader(
                        //new ChannelSeparator(LociPrefs.makeImageReader()));


                        this.Height = MyCellomicsReader.getSizeY();
                        this.Depth = MyCellomicsReader.getSizeZ();
                        NumChannels = MyCellomicsReader.getSizeC();
                        NumBitsPerPixel = MyCellomicsReader.getBitsPerPixel();
                        MyCellomicsReader.close();
                        break;
                    #endregion
                    #region tiff - BioFormats
                    case "tiff":

                        loci.formats.@in.TiffReader MyTiffReader = new loci.formats.@in.TiffReader();
                        MyTiffReader.setId(CurrentName);
                        rgbValues = MyTiffReader.openBytes(0);
                        this.Width = MyTiffReader.getSizeX();
                        this.Height = MyTiffReader.getSizeY();
                        this.Depth = MyTiffReader.getSizeZ();
                        NumChannels = MyTiffReader.getSizeC();
                        NumBitsPerPixel = MyTiffReader.getBitsPerPixel();
                        MyTiffReader.close();


                        break;
                    #endregion
                    #region LSM - BioFormats
                    case "lsm":
                        loci.formats.@in.ZeissLSMReader MyLSMReader = new loci.formats.@in.ZeissLSMReader();
                        MyLSMReader.setId(CurrentName);

                        this.Width = MyLSMReader.getSizeX();
                        this.Height = MyLSMReader.getSizeY();

                        NumChannels = MyLSMReader.getSizeC();
                        this.Depth = MyLSMReader.getSizeZ();

                        this.Name = CurrentName;
                        this.SliceSize = this.Width * this.Height;
                        this.ImageSize = SliceSize * Depth;

                        if (IdxName == 0)
                            this.SingleChannelImage = new cListSingleChannelImage();

                        #region GetMetaData
                        ResMeta = MyLSMReader.getSeriesMetadataValue("VoxelSizeX");
                        if (ResMeta == null)
                            this.Resolution.X = 1;
                        else
                            this.Resolution.X = ((java.lang.Double)ResMeta).doubleValue();

                        ResMeta = MyLSMReader.getSeriesMetadataValue("VoxelSizeY");
                        if (ResMeta == null)
                            this.Resolution.X = 1;
                        else
                            this.Resolution.Y = ((java.lang.Double)ResMeta).doubleValue();

                        ResMeta = MyLSMReader.getSeriesMetadataValue("VoxelSizeZ");
                        if (ResMeta == null)
                            this.Resolution.Z = 1;
                        else
                            this.Resolution.Z = ((java.lang.Double)ResMeta).doubleValue();
                        #endregion

                        for (int IdxChannel = 0; IdxChannel < NumChannels; IdxChannel++)
                            this.SingleChannelImage.Add(new cSingleChannelImage(this.Width, this.Height, this.Depth, new cPoint3D(this.Resolution)));


                        float TmpValue = 0;
                        byte[] TmpTable;

                        for (int IdxZ = 0; IdxZ < this.Depth; IdxZ++)
                        {
                            for (int IdxChannel = 0; IdxChannel < NumChannels; IdxChannel++)
                            {
                                TmpTable = MyLSMReader.openBytes(IdxZ * NumChannels + IdxChannel);

                                for (int IdxY = 0; IdxY < this.Height; IdxY++)
                                    for (int IdxX = 0; IdxX < this.Width; IdxX++)
                                    {
                                        TmpValue = TmpTable[IdxX + IdxY * this.Width];

                                        this.SingleChannelImage[IdxChannel + ChannelStart].Data[IdxX + IdxY * this.Width + IdxZ * this.SliceSize] = TmpValue;
                                    }
                            }
                        }
                        NumBitsPerPixel = MyLSMReader.getBitsPerPixel();

                        // if the meta data are ok, take the name
                        for (int IdxChannel = 0; IdxChannel < NumChannels; IdxChannel++)
                        {
                            ResMeta = MyLSMReader.getSeriesMetadataValue("DataChannel #" + (IdxChannel + 1) + " Name");
                            if (ResMeta != null)
                                this.SingleChannelImage[IdxChannel + ChannelStart].Name = (string)ResMeta;
                        }
                        goto NEXTLOOP;
                    #endregion
                    #region tif - FreeImage
                    case "tif":


                        //Image<Gray, Single> myImage = new Image<Gray, Single>(@CurrentName);
                        //var watch = Stopwatch.StartNew();

                        int PageCount = 1;
                        this.Depth = PageCount;
                        NumChannels = 1;
                        
                        Mat myImage = new Mat(CurrentName, Emgu.CV.CvEnum.LoadImageType.AnyDepth);
                        //watch.Stop();
                        //cGlobalInfo.WindowHCSAnalyzer.richTextBoxConsole.AppendText("Opencv = " + watch.ElapsedMilliseconds + "\n");

                        for (int IDxPlane = 0; IDxPlane < PageCount; IDxPlane++)
                        {
                            
                            if (IDxPlane == 0)
                            {
                                this.Width = myImage.Width;
                                this.Height = myImage.Height;
                                this.Resolution.X = 1;
                                this.Resolution.Y = 1;
                                NumBitsPerPixel = 16;

                                for (int IdxChannel = 0; IdxChannel < NumChannels; IdxChannel++)
                                {
                                    cSingleChannelImage TmpChannelImage = new cSingleChannelImage(this.Width, this.Height, this.Depth, new cPoint3D(1, 1, 1));
                                    if (ListImageMetaInfo[IdxName].Name != "") TmpChannelImage.Name = ListImageMetaInfo[IdxName].Name;
                                    if (ListImageMetaInfo[IdxChannel].ResolutionX != -1) this.Resolution.X = ListImageMetaInfo[IdxChannel].ResolutionX;
                                    if (ListImageMetaInfo[IdxChannel].ResolutionY != -1) this.Resolution.Y = ListImageMetaInfo[IdxChannel].ResolutionY;
                                    if (ListImageMetaInfo[IdxChannel].ResolutionZ != -1) this.Resolution.Z = ListImageMetaInfo[IdxChannel].ResolutionZ;
                                    Image<Gray, float> myImage2 = myImage.ToImage<Gray, float>();
                                    //var watch2 = Stopwatch.StartNew();
                                    
                                    TmpChannelImage.SetNewDataFromOpenCV(myImage2);
                                    //watch2.Stop();
                                    //cGlobalInfo.WindowHCSAnalyzer.richTextBoxConsole.AppendText("Convert = " + watch2.ElapsedMilliseconds + "\n");
                                    this.SingleChannelImage.Add(TmpChannelImage);
                                }
                            }
                           
                            

                        }

                        this.Name = CurrentName;
                        this.SliceSize = this.Width * this.Height;
                        this.ImageSize = SliceSize * Depth;

                        //goto NEXTLOOP;

                        break;
                        #endregion
                }

                NEXTLOOP:;

                ChannelStart += NumChannels;
            }
        }
Example #20
        // ----- Button: Load Depthmap -----
        private void btn_loadDepthmap_Click(object sender, EventArgs e)
        {
            OpenFileDialog Openfile = new OpenFileDialog();
            if (Openfile.ShowDialog() == DialogResult.OK)
            {
                // open the image as grayscale
                matDepthmap = CvInvoke.Imread(Openfile.FileName, LoadImageType.Grayscale);

                // create the depthmap image
                imageDepthmap = matDepthmap.ToImage<Gray, Byte>();

                // enable the next button only after a depthmap was loaded
                btn_horizonDistance.Enabled = true;
            }
        }
Example #21
 private void ProcessFrame(object sender, EventArgs arg)
 {
     Mat frame = new Mat();
     frameNum = _capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);
     Image<Bgr, Byte> frame1;
     int breastCount = 0;
     int pussyCount = 0;
     int dickCount = 0;
     string temp = "";
     _capture.Retrieve(frame, 0);
     frame1 = frame.ToImage<Bgr, Byte>();
     frame1 = frame1.Resize(_rescale, Emgu.CV.CvEnum.Inter.Cubic);
     frame = frame1.Mat;
     //MessageBox.Show(_nn.ToString());
     if (!frame.IsEmpty && frameCtr == _frameskip)
     {
         frameCtr = 0;
         using (UMat ugray = new UMat())
         {
             CvInvoke.CvtColor(frame, ugray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
             CvInvoke.EqualizeHist(ugray, ugray);
             Rectangle[] breastDetected = cascadeBreast.DetectMultiScale(
                ugray,
                1.1,
                _nn,
                new Size(20, 20));
             Rectangle[] pussyDetected = cascadePuss.DetectMultiScale(
                ugray,
                1.1,
                _nn,
                new Size(20, 20));
             Rectangle[] dickDetected = cascadePen.DetectMultiScale(
                ugray,
                1.1,
                50,
                new Size(20, 20));
             foreach (Rectangle b in breastDetected)
             {
                 CvInvoke.Rectangle(frame, b, new Bgr(Color.Red).MCvScalar, 2);
                 
             }
             foreach (Rectangle b in pussyDetected)
             {
                 CvInvoke.Rectangle(frame, b, new Bgr(Color.Blue).MCvScalar, 2);
             }
             foreach (Rectangle b in dickDetected)
             {
                 CvInvoke.Rectangle(frame, b, new Bgr(Color.Green).MCvScalar, 2);
             }
              breastCount = breastDetected.Length;
              pussyCount = pussyDetected.Length;
              dickCount = dickDetected.Length;
              totalBreastCount += breastCount;
              totalPussyCount += pussyCount;
              totalDickCount += dickCount;
              if ((breastCount > 0 || pussyCount > 0 || dickCount > 0) && _pauseAtDetection)
              {
                  _capture.Pause();
                  playToggle.Invoke(new MethodInvoker(delegate { playToggle.Text = "Start"; }));
                  _captureInProgress = false;
                   if (breastCount > 0)
                   {
                       temp += breastCount + " breast(s) found\n";
                   }
                   if (pussyCount > 0)
                   {
                       temp += pussyCount + " pussy(s) found\n";
                   }
                   if (dickCount > 0)
                   {
                       temp += dickCount + " dick(s) found\n";
                   }
                  MessageBox.Show(temp);
              }
         }            
     }
     if (_frameskip > 0)
     {
         frameCtr++;
     }
     label4.Invoke(new MethodInvoker(delegate { label4.Text = frameNum.ToString(); logger(frameNum, breastCount, pussyCount,dickCount); totalBreast.Text = totalBreastCount.ToString(); totalF.Text = totalPussyCount.ToString(); totalG.Text = totalDickCount.ToString(); }));
     imgBox.Image = frame;
     
 }
Example #22
        public void ProcessFrame(object sender, EventArgs arg)
        {
            _capture.FlipHorizontal = true;                      // flip the image about the Y axis
            Mat imageMatrix = new Mat();                         // matrix that receives the frame from the camera stream
            _capture.Retrieve(imageMatrix, 0);
            Image<Bgr, byte> imageFrameBGR = imageMatrix.ToImage<Bgr, byte>();
            Image<Gray, byte> imageFrameGray = RGBFilter(imageFrameBGR,
                                          red_color_min, red_color_max,    // threshold filtering on color values
                                          green_color_min, green_color_max,
                                          blue_color_min, blue_color_max);
            imageFrameGray = MassCenter(imageFrameGray);
            Display(imageMatrix, imageFrameGray);                // <--------------------- display

            if (_isDetected)                                     // look for motion
            {
                if (_count_frames == 0)                          // hacky: works per-frame, should be time-based
                {

                    _firstSpeedCorrection = true;
                }
                if (_firstSpeedCorrection)
                {
                    if (_count_frames != 0)
                    {
                        if (Math.Abs(x_coord[0] - center_x) >= 30 || Math.Abs(y_coord[0] - center_y) >= 30)
                        {
                            x_coord.Clear();
                            y_coord.Clear();
                        }
                        else
                        {
                            _firstSpeedCorrection = false;
                            x_begining = center_x;
                            y_begining = center_y;
                        }
                    }
                }

                x_coord.Add(center_x);
                y_coord.Add(center_y);

                x_ending = center_x;
                y_ending = center_y;
                _count_frames++;
            }
            else
            {
                if (my_timer.ElapsedMilliseconds > 2000) // 2 seconds to settle the image
                {
                    my_timer.Reset();
                    if (_count_frames >= 5)
                    {
                        Line_func line = new Line_func(x_coord, y_coord);
                        labelFunc.Text = line.coord_a_.ToString() + "X + " + line.coord_b_.ToString();
                        _correction = false;

                        if (Math.Abs(x_ending - x_begining) > 200 && Math.Abs(line.coord_a_) < 1)
                        {
                            if (x_begining < 215)
                            {
                                labelType.Text = " Горизонтальная линия из левого края";
                                windowsChoose.nextSong();// след песня
                            }
                            else if (x_begining > 430)
                            {
                                labelType.Text = " Горизонтальная линия из правого края";
                                windowsChoose.prevSong();
                            }
                            else
                            {
                                if (x_ending - x_begining > 0)
                                {
                                    labelType.Text = " Горизонтальная линия из середины вправо";
                                    windowsChoose.playSong();
                                }
                                else if (x_ending - x_begining < 0)
                                {
                                    labelType.Text = " Горизонтальная линия из середины влево";
                                    windowsChoose.stopSong();
                                }
                            }
                        }
                        else if (Math.Abs(y_ending - y_begining) > 250 && Math.Abs(line.coord_a_) > 1)
                        {
                            if (y_ending - y_begining > 0)
                            {
                                labelType.Text = " Вертикальная линия сверху вниз";
                                windowsChoose.voulumeDown();
                            }
                            else if (y_ending - y_begining < 0)
                            {
                                labelType.Text = " Вертикальная линия снизу вверх";
                                windowsChoose.voulumeUp();
                            }
                        }
                        else
                            labelType.Text = "";
                    }
                    else
                    {
                        _correction = true;
                    }
                    if (!_correction)
                    {
                        labelDebagLog.Text = "(" + x_begining.ToString() + " ; " + y_begining.ToString() + ") - (" + x_ending + " ; " + y_ending + ")";
                        _count_frames = 0;
                        listBoxCoordinatesX.Items.Clear();
                        listBoxCoordinatesY.Items.Clear();
                        foreach(Double num in x_coord)
                        {
                            listBoxCoordinatesX.Items.Add(" X:" + num.ToString());
                        }
                        foreach (Double num in y_coord)
                        {
                            listBoxCoordinatesY.Items.Add(" Y:" + num.ToString());
                        }
                        my_timer.Reset();
                        x_coord.Clear();
                        y_coord.Clear();
                    }
                }
            }
        }
Example #23
 /// <summary>
 /// Throws an exception if the native Emgu CV libraries are not going to load
 /// </summary>
 private static void TestEmguCVLoad()
 {
     if (IntPtr.Size != 8)
     {
         throw new Exception("Change VS options to ensure 64bit IIS Express");
     }
     using (var test = new Mat())
     {
         var f = test.ToImage<Bgr, byte>();
         f.Dispose();
     }
 }
Example #24
 private Image<Bgr, byte> PullImage()
 {
     Image<Bgr, byte> output;
     using (var matCaptured = new Mat())
     {
         CameraCapture.Retrieve(matCaptured);
         output = matCaptured.ToImage<Bgr, byte>();
         WriteText(output, 30, DateTime.Now.ToString(TimeFormat));
         imageBoxCaptured.Image = output;
     }
     return output;
 }
        public override Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> GetDifference(Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte> frame, IEnumerable<Emgu.CV.Image<Emgu.CV.Structure.Bgr, byte>> frameHistory)
        {
            if (frameHistory.Count() != _historySize || _mixtures != NumberOfMixtures.Value || _ratio != BackgroundRatio.Value || _noise != NoiseSigma.Value)
            {
                _historySize = frameHistory.Count();
                _mixtures = NumberOfMixtures.Value;
                _ratio = BackgroundRatio.Value;
                _noise = NoiseSigma.Value;

                ResetSubstractor();
            }

            Mat mask = new Mat();
            _mog.Apply(frame, mask);

            return mask.ToImage<Bgr, byte>();
        }
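The _historySize, _mixtures, _ratio and _noise fields map one-to-one onto the parameters of OpenCV's MOG background subtractor, so ResetSubstractor (not shown) plausibly rebuilds _mog along these lines. A sketch, assuming Emgu CV's BackgroundSubtractorMOG wrapper:

        private BackgroundSubtractorMOG _mog;

        private void ResetSubstractor()
        {
            // MOG takes (history, nmixtures, backgroundRatio, noiseSigma)
            _mog = new BackgroundSubtractorMOG(_historySize, _mixtures, _ratio, _noise);
        }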
Example #26
        public CameraTrackingFindSubjectsReturnModel FindSubjects()
        {
            double    largestW     = 0;
            double    largestH     = 0;
            double    centerX      = 0;
            double    centerY      = 0;
            bool      foundSubject = false;
            Rectangle subject      = new Rectangle();

            // get detection 'blobs' or regions
            CvBlobs blobs = new CvBlobs();

            _blobDetector.Detect(LastMask.ToImage <Gray, byte>(), blobs);
            blobs.FilterByArea(100, int.MaxValue);

            float scale = (LastFrame.Width + LastFrame.Height) / 2.0f;

            _tracker.Update(blobs, 0.01 * scale, 5, 5);

            FrameWidth  = LastFrame.Width;
            FrameHeight = LastFrame.Height;

            foreach (var pair in _tracker)
            {
                CvTrack b = pair.Value;

                // limit the largest and smallest size boxes we care about.
                if (b.BoundingBox.Width < (LastFrame.Width / SmallestDetectionWidthSizeDivisor) ||
                    b.BoundingBox.Height < (LastFrame.Height / SmallestDetectionHeightSizeDivisor) ||
                    (b.BoundingBox.Width > (LastFrame.Width / LargestDetectionWidthSizeDivisor) &&
                     b.BoundingBox.Height > (LastFrame.Height / LargestDetectionHeightSizeDivisor)))
                {
                    continue;
                }

                // keep track of the largest regions as we only care to track the largest
                if (b.BoundingBox.Width > largestW)
                {
                    subject  = b.BoundingBox;
                    largestW = b.BoundingBox.Width;
                    largestH = b.BoundingBox.Height;
                    centerX  = b.Centroid.X;
                    centerY  = b.Centroid.Y;
                    CvInvoke.Rectangle(LastFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 20);
                    CvInvoke.PutText(LastFrame, b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
                    foundSubject = true;
                }
                else
                {
                    CvInvoke.Rectangle(LastFrame, b.BoundingBox, new MCvScalar(255.0, 255.0, 255.0), 1);
                    CvInvoke.PutText(LastFrame, b.Id.ToString(), new Point((int)Math.Round(b.Centroid.X), (int)Math.Round(b.Centroid.Y)), FontFace.HersheyPlain, 1.0, new MCvScalar(255.0, 255.0, 255.0));
                }
            }

            return new CameraTrackingFindSubjectsReturnModel
            {
                CenterX = centerX,
                CenterY = centerY,
                BoundingBox = subject,
                FoundSubject = foundSubject
            };
        }
Example #27
        public void ProcessFrame(object sender, EventArgs arg)
        {
            _capture.FlipHorizontal = true;                      // flip the image about the Y axis
            Mat imageMatrix = new Mat();                         // matrix that receives the frame from the camera stream
            _capture.Retrieve(imageMatrix, 0);
            Image<Bgr, byte> imageFrameBGR = imageMatrix.ToImage<Bgr, byte>(); // conversion
            Image<Gray, byte> imageFrameGray = RGBFilter(imageFrameBGR,
                                          red_color_min, red_color_max,    // threshold filtering on color values
                                          green_color_min, green_color_max,
                                          blue_color_min, blue_color_max);

            imageFrameGray = MassCenter(imageFrameGray);
            Display(imageMatrix, imageFrameGray);                // <--------------------- display

            setLabelValue(labelTimer, my_timer.ElapsedMilliseconds.ToString());

            if (_isDetected)                                     // look for motion, if the object was found
            {
                x_coord.Add(center_x);                           // append the coordinates of its center of mass to the arrays
                y_coord.Add(center_y);
                _count_frames++;                                 // increment the frame counter
            }
            else if (x_coord != null)                            // if the object is not visible
            {
                moutionType(x_coord, y_coord);
            }
        }