Code example #1
        public void PointsMarkerTest1()
        {
            IEnumerable<FastRetinaKeypoint> points = new FastRetinaKeypoint[]
            {
                new FastRetinaKeypoint(1, 2),
                new FastRetinaKeypoint(3, 4),
            };

            // Create a marker with the default color and width
            var marker = new PointsMarker(points);

            // Create a 5x5 all-black image and mark the points over it, in place
            double[,] m = Matrix.Zeros(5, 5);
            Bitmap bmp = m.ToBitmap();

            marker.ApplyInPlace(bmp);

            // Read the marked image back as a matrix
            double[,] actual = bmp.ToMatrix(0);

            // Each point should be marked with a 3x3 square, clipped at the image borders
            double[,] expected =
            {
                { 0, 0, 0, 0, 0 },
                { 1, 1, 1, 0, 0 },
                { 1, 1, 1, 0, 0 },
                { 1, 1, 1, 1, 1 },
                { 0, 0, 1, 1, 1 },
            };

            Assert.AreEqual(expected, actual);
        }
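
A minimal usage sketch outside of a test fixture, for reference. It reuses only calls that already appear in the examples on this page (Accord.Imaging.Image.FromFile, the PointsMarker(points, color, width) constructor and ApplyInPlace); the method name, file names and coordinates below are placeholders, not part of the original code.

        public void MarkPointsSketch()
        {
            // Load any bitmap (placeholder file name)
            Bitmap image = Accord.Imaging.Image.FromFile("input.png");

            // Points to highlight (placeholder coordinates)
            var points = new List<IntPoint>
            {
                new IntPoint(10, 20),
                new IntPoint(30, 40),
            };

            // Paint a 5-pixel yellow square over each point, directly on the image
            var marker = new PointsMarker(points, Color.Yellow, 5);
            marker.ApplyInPlace(image);

            // Save the annotated image (placeholder file name)
            image.Save("output.png");
        }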
Code example #2
        public void blobcounter_test()
        {
            string basePath = Path.Combine(NUnit.Framework.TestContext.CurrentContext.TestDirectory, "Resources");

            #region doc_process
            // Load an example image containing blobs (such as the sample from the Blob Detection sample applications)
            // https://github.com/accord-net/framework/raw/development/Samples/Imaging/Detection%20(Blobs)/demo.png
            Bitmap image = Accord.Imaging.Image.FromFile(Path.Combine(basePath, "blob-input.png"));

            // Create a new blob counter object
            var blobCounter = new BlobCounter();

            // Process the image looking for blobs
            blobCounter.ProcessImage(image);

            // Get information about all the image blobs found:
            Blob[] blobs = blobCounter.GetObjectsInformation();

            // Prepare to extract their Convex Hull
            var grahamScan = new GrahamConvexHull();
            var colors     = new ColorSequenceCollection();

            // For each blob in the image
            for (int i = 0; i < blobs.Length; i++)
            {
                // Get the blob
                Blob blob = blobs[i];

                // Collect edge points
                List<IntPoint> edgePoints = blobCounter.GetBlobsEdgePoints(blob);

                // Find convex hull
                List<IntPoint> hull = grahamScan.FindHull(edgePoints);

                // Prepare to mark the hull in the image
                var marker = new PointsMarker(colors[i])
                {
                    Points  = hull,
                    Connect = true // connect the points with line segments
                };

                // Draw the hull lines
                marker.ApplyInPlace(image);
            }

            // Save the image to disk
            image.Save(Path.Combine(basePath, "test.png"));
            #endregion

            Assert.AreEqual(25, blobs.Length);
        }
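
A related sketch, assuming the same image, blobCounter and blobs variables as in the example above (it would sit inside the method, before the final assert): instead of drawing convex hulls, the blob centers reported by GetObjectsInformation can be marked directly. Blob.CenterOfGravity and its Round() conversion to IntPoint come from the same Accord.Imaging API; the red color and 3-pixel width are arbitrary choices.

            // Collect each blob's center of gravity as an integer point
            var centers = new List<IntPoint>();
            foreach (Blob blob in blobs)
                centers.Add(blob.CenterOfGravity.Round());

            // Paint a 3-pixel red square at every blob center
            var centerMarker = new PointsMarker(centers, Color.Red, 3);
            centerMarker.ApplyInPlace(image);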
Code example #3
        private static void show(Bitmap hand, List<IntPoint> contour, List<IntPoint> peaks, List<IntPoint> supports)
        {
            // Trace the hand contour with 1-pixel white points
            PointsMarker cmarker = new PointsMarker(contour, Color.White, 1);
            cmarker.ApplyInPlace(hand);

            // Mark the detected peaks (fingertip candidates) with 5-pixel green squares
            PointsMarker pmarker = new PointsMarker(peaks, Color.Green, 5);
            pmarker.ApplyInPlace(hand);

            // Mark the supporting points with 5-pixel yellow squares
            PointsMarker hmarker = new PointsMarker(supports, Color.Yellow, 5);
            hmarker.ApplyInPlace(hand);

            // Display the annotated image
            ImageBox.Show(hand, PictureBoxSizeMode.Zoom);
        }
Code example #4
        // New frame received by the player
        private void videoSourcePlayer_NewFrame(object sender, ref Bitmap image)
        {
            lock (this)
            {
                if (tracker == null)
                {
                    return;
                }

                if (form != null && !form.IsDisposed)
                {
                    form.Image = image;
                }

                // Lock the frame bits so they can be wrapped as an unmanaged image
                BitmapData data = image.LockBits(new Rectangle(0, 0, image.Width, image.Height),
                                                 ImageLockMode.ReadWrite, image.PixelFormat);

                UnmanagedImage img = new UnmanagedImage(data);

                // Track the object in the current frame
                tracker.ComputeOrientation = showAngle;
                tracker.ProcessFrame(img);

                // Get the bounding rectangle and the image of the tracked object
                Rectangle rect = tracker.TrackingObject.Rectangle;
                UnmanagedImage hand = tracker.TrackingObject.Image;

                if (hand != null && (showContour || showFingertips))
                {
                    // Convert the tracked hand to grayscale and smooth it
                    UnmanagedImage grayhand = Grayscale.CommonAlgorithms.BT709.Apply(hand);

                    blur.ApplyInPlace(grayhand);

                    // Extract the hand contour and translate it into frame coordinates
                    List<IntPoint> contour = bf.FindContour(grayhand);

                    for (int i = 0; i < contour.Count; i++)
                    {
                        contour[i] += new IntPoint(rect.X, rect.Y);
                    }

                    // Find the contour peaks (fingertip candidates)
                    List<IntPoint> peaks = kcurv.FindPeaks(contour);

                    if (showContour)
                    {
                        cmarker.Points = contour;
                        cmarker.ApplyInPlace(img);
                    }

                    if (showFingertips)
                    {
                        pmarker.Points = peaks;
                        pmarker.ApplyInPlace(img);
                    }
                }

                if (showRectangle)
                {
                    // Draw the tracker's bounding rectangle
                    RectanglesMarker marker = new RectanglesMarker(rect);
                    marker.ApplyInPlace(img);
                }

                if (showAngle)
                {
                    LineSegment axis = tracker.TrackingObject.GetAxis(AxisOrientation.Vertical);

                    if (axis != null)
                    {
                        // Draw the object's vertical axis
                        Drawing.Line(img, axis.Start.Round(), axis.End.Round(), Color.Red);
                    }
                }

                image.UnlockBits(data);
            }
        }
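
The handler above depends on fields declared elsewhere in the form (tracker, blur, bf, kcurv, cmarker, pmarker and the show* flags). The sketch below shows one plausible set of declarations for the image-processing fields, inferred from how they are used; the concrete tracker type and the KCurvature parameters are guesses, not taken from the original sample.

        // Inferred field declarations (a sketch, not the original sample's code)
        private HslBlobTracker tracker;                      // an Accord.Vision tracker exposing TrackingObject
        private GaussianBlur blur = new GaussianBlur();      // smooths the hand image before contour extraction
        private BorderFollowing bf = new BorderFollowing();  // extracts the hand contour
        private KCurvature kcurv =                           // finds fingertip peaks (parameters are guesses)
            new KCurvature(30, new DoubleRange(0, 45));
        private PointsMarker cmarker = new PointsMarker(Color.White);  // contour marker
        private PointsMarker pmarker = new PointsMarker(Color.Green);  // fingertip marker
        private bool showAngle, showContour, showFingertips, showRectangle;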