Code Example #1
        public void ReadsLeftAngleLinesCorrectly()
        {
            var map = new int[][] {
                new int[] { 0, 0, 0 },
                new int[] { 1, 0, 0 },
                new int[] { 1, 0, 0 },
                new int[] { 1, 1, 1 },
                new int[] { 0, 0, 0 }
            };

            var d = new LineDetection(new Point(1, 0), (p) => {
                if (p.x >= 0 && p.x < map.Length && p.y >= 0 && p.y < map[p.x].Length)
                {
                    return(map[p.x][p.y] == 1);
                }

                return(false);
            });

            Assert.AreEqual(d.Lines.Length, 2);

            Assert.AreEqual(d.Lines[0].Start.x, 1);
            Assert.AreEqual(d.Lines[0].Start.y, 0);
            Assert.AreEqual(d.Lines[0].End.x, 3);
            Assert.AreEqual(d.Lines[0].End.y, 0);
            Assert.AreEqual(d.Lines[0].IsOrphan, true);

            Assert.AreEqual(d.Lines[1].Start.x, 3);
            Assert.AreEqual(d.Lines[1].Start.y, 0);
            Assert.AreEqual(d.Lines[1].End.x, 3);
            Assert.AreEqual(d.Lines[1].End.y, 2);
            Assert.AreEqual(d.Lines[1].IsOrphan, true);
        }
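This test's predicate repeats a bounds guard before indexing the map (the same pattern appears again in Code Example #3 below). The guard can be factored into a small helper; the following is only a sketch, not part of the original test code, and it assumes using System; is in scope, that Point exposes the int x / int y members used in the lambda above, and that the constructor's second argument is compatible with Func<Point, bool> (in the actual API it may be a Predicate<Point> or a custom delegate):

        // Sketch only: builds the same bounds-checked predicate that the test lambdas use,
        // so it can be shared between tests.  Assumes Point exposes int x / int y.
        static Func<Point, bool> MapPredicate(int[][] map)
        {
            return p => p.x >= 0 && p.x < map.Length &&
                        p.y >= 0 && p.y < map[p.x].Length &&
                        map[p.x][p.y] == 1;
        }

With it, the constructor call becomes new LineDetection(new Point(1, 0), MapPredicate(map)).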
Code Example #2
File: LineAnalyzer.cs  Project: mostanes/umbrella2
        /// <summary>
        /// Merges line segment blobs (connected components) into one LineDetection.
        /// </summary>
        /// <param name="segment">Detected blobs.</param>
        /// <param name="Input">Input image.</param>
        /// <param name="OX">Delta between the data array and actual position, X component.</param>
        /// <param name="OY">Delta between the data array and actual position, Y component.</param>
        /// <returns>A LineDetection from the blobs.</returns>
        static LineDetection MergeBlobs(DetectionSegment segment, double[,] Input, int OX, int OY)
        {
            double          Xmean = 0, Ymean = 0;
            double          XX = 0, XY = 0, YY = 0;
            double          Flux = 0;
            double          XBmean = 0, YBmean = 0;
            List <double>   PValues      = new List <double>();
            List <IntPoint> MergedPoints = segment.Blobs.Aggregate(new List <IntPoint>(), (x, y) => { x.AddRange(y.Points); return(x); });

            foreach (IntPoint pt in MergedPoints)
            {
                double Val = Input[pt.Y, pt.X];
                Xmean  += pt.X; Ymean += pt.Y;
                XBmean += Val * pt.X; YBmean += Val * pt.Y;
                XX     += pt.X * pt.X; XY += pt.X * pt.Y; YY += pt.Y * pt.Y;
                Flux   += Val;
                PValues.Add(Val);
            }
            Xmean  /= MergedPoints.Count;
            Ymean  /= MergedPoints.Count;
            XBmean /= Flux;
            YBmean /= Flux;
            XX     /= MergedPoints.Count;
            XY     /= MergedPoints.Count;
            YY     /= MergedPoints.Count;
            XX     -= Xmean * Xmean;
            XY     -= Xmean * Ymean;
            YY     -= Ymean * Ymean;

            // Closed-form eigen-decomposition of the covariance matrix [[XX, XY], [XY, YY]]:
            // Msq is Sqrt((XX - YY)^2 + 4 XY^2) written out term by term.
            double Msq = Sqrt(XX * XX + 4 * XY * XY - 2 * XX * YY + YY * YY);
            double L1  = 1.0 / 2 * (XX + YY - Msq);        // smaller eigenvalue (minor axis)
            double L2  = 1.0 / 2 * (XX + YY + Msq);        // larger eigenvalue (major axis)
            double A1  = Atan2(2 * XY, -(-XX + YY + Msq)); // angle of the L1 eigenvector
            double A2  = Atan2(2 * XY, -(-XX + YY - Msq)); // angle of the L2 eigenvector

            LineDetection ld = new LineDetection()
            {
                Points = MergedPoints.Select((x) => new PixelPoint()
                {
                    X = x.X + OX, Y = x.Y + OY
                }).ToList(),
                EigenValue1 = L1,
                EigenValue2 = L2,
                EigenAngle1 = A1,
                EigenAngle2 = A2,
                Barycenter  = new PixelPoint()
                {
                    X = XBmean + OX, Y = YBmean + OY
                },
                PointsCenter = new PixelPoint()
                {
                    X = Xmean + OX, Y = Ymean + OY
                },
                Flux        = Flux,
                PointValues = PValues
            };

            return(ld);
        }
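The Msq / L1 / L2 / A1 / A2 block above is the closed-form eigen-decomposition of the 2x2 covariance matrix [[XX, XY], [XY, YY]] of the merged pixel coordinates: L1 and L2 are the variances along the blob's minor and major axes, and A1 / A2 the corresponding axis angles. Below is a minimal standalone sketch (the sample moments are made up, not taken from the project) that reproduces the same formulas and checks the larger eigenpair against the eigenvector equation C v = L2 v:

using System;
using static System.Math;

class CovarianceEigenSketch
{
    static void Main()
    {
        // Hypothetical central moments of a pixel distribution.
        double XX = 2.0, XY = 0.5, YY = 1.0;

        // Same discriminant as in MergeBlobs:
        // XX*XX + 4*XY*XY - 2*XX*YY + YY*YY == (XX - YY)^2 + 4*XY^2.
        double Msq = Sqrt((XX - YY) * (XX - YY) + 4 * XY * XY);
        double L1  = 0.5 * (XX + YY - Msq);          // smaller eigenvalue (minor axis)
        double L2  = 0.5 * (XX + YY + Msq);          // larger eigenvalue (major axis)
        double A2  = Atan2(2 * XY, XX - YY + Msq);   // major-axis angle, equal to EigenAngle2

        // Verify C * v == L2 * v for v = (cos A2, sin A2).
        double vx = Cos(A2), vy = Sin(A2);
        double rx = XX * vx + XY * vy - L2 * vx;
        double ry = XY * vx + YY * vy - L2 * vy;
        Console.WriteLine($"L1={L1:F4}  L2={L2:F4}  residual={Abs(rx) + Abs(ry):E2}");
    }
}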
Code Example #3
        public void ReadsCubeLinesCorrectly()
        {
            var map = new int[][] {
                new int[] { 0, 0, 0 },
                new int[] { 1, 1, 1 },
                new int[] { 1, 0, 1 },
                new int[] { 1, 1, 1 },
                new int[] { 0, 0, 0 }
            };

            var d = new LineDetection(new Point(1, 0), (p) => {
                // Ensure coordinate is valid in our map
                if (p.x >= 0 && p.x < map.Length && p.y >= 0 && p.y < map[p.x].Length)
                {
                    return(map[p.x][p.y] == 1);
                }

                return(false);
            });

            Assert.AreEqual(d.Lines.Length, 4);

            Assert.AreEqual(d.Lines[0].Start.x, 1);
            Assert.AreEqual(d.Lines[0].Start.y, 0);
            Assert.AreEqual(d.Lines[0].End.x, 3);
            Assert.AreEqual(d.Lines[0].End.y, 0);
            Assert.AreEqual(d.Lines[0].IsOrphan, false);

            Assert.AreEqual(d.Lines[1].Start.x, 3);
            Assert.AreEqual(d.Lines[1].Start.y, 0);
            Assert.AreEqual(d.Lines[1].End.x, 3);
            Assert.AreEqual(d.Lines[1].End.y, 2);
            Assert.AreEqual(d.Lines[1].IsOrphan, false);

            Assert.AreEqual(d.Lines[2].Start.x, 3);
            Assert.AreEqual(d.Lines[2].Start.y, 2);
            Assert.AreEqual(d.Lines[2].End.x, 1);
            Assert.AreEqual(d.Lines[2].End.y, 2);
            Assert.AreEqual(d.Lines[2].IsOrphan, false);

            Assert.AreEqual(d.Lines[3].Start.x, 1);
            Assert.AreEqual(d.Lines[3].Start.y, 2);
            Assert.AreEqual(d.Lines[3].End.x, 1);
            Assert.AreEqual(d.Lines[3].End.y, 0);
            Assert.AreEqual(d.Lines[3].IsOrphan, false);
        }
Code Example #4
        public static Image <Bgr, byte> Detection(Image <Bgr, byte> originalImage, DetectionType DT, out ErrorCode Err)
        {
            Err = ErrorCode.Normal;
            Image <Bgr, byte> b = null;

            switch (DT)
            {
            case DetectionType.Feature:
                switch (_featureType)
                {
                case featureDetectionType.cannyEdge:
                    b = ContourDetection.cannyEdges(originalImage, false).Convert <Bgr, Byte>(); break;

                case featureDetectionType.contour:
                    b = ContourDetection.contourDetection(originalImage); break;

                case featureDetectionType.line:
                    b = LineDetection.lineDetection(originalImage); break;
                }
                break;

            case DetectionType.Object:
                if (GV.imgOriginal != null && GV.object_img != null)
                {
                    Image <Bgr, byte> outPutImg = GV.imgOriginal;
                    switch (_objectType)
                    {
                    case objectDetectionType.SURF:
                        if (!SURF.SearchObject_SURF(GV.imgOriginal.Convert <Gray, byte>(), GV.object_img.Convert <Gray, byte>(), out outPutImg))
                        {
                            Err = ErrorCode.SearchSURF_Fail;
                        }
                        else
                        {
                            BindManager.BindMngr.GMessage.value = "Found using SURF";
                        }
                        b = outPutImg; break;

                    case objectDetectionType.FFT:
                        if (!FFT.searchObject_FFT(GV.imgOriginal, GV.object_img, out outPutImg))
                        {
                            Err = ErrorCode.SearchFFT_Fail;
                        }
                        else
                        {
                            BindManager.BindMngr.GMessage.value = "Found using FFT";
                        }
                        b = outPutImg; break;

                    case objectDetectionType.color:
                        if (!ColorDetection2.Color_detection(GV.imgOriginal, GV.object_img, out outPutImg, Parameters._colorTolerance))
                        {
                            Err = ErrorCode.SearchColor_Fail;
                        }
                        else
                        {
                            //outPutImg = ContourDetection.contourDetection(outPutImg);
                            PointF[] pts = FindWhitePoints(outPutImg.Convert <Gray, byte>());

                            MCvBox2D box = SquareFittingWithAngle(pts);
                            outPutImg.Draw(box, new Bgr(Color.Green), 2);
                            BindManager.BindMngr.GMessage.value = $"Displaying matching colors. Angle [{box.angle}deg]";
                        }
                        b = outPutImg; break;
                    }
                }
                else if (GV.object_img == null)
                {
                    Err = ErrorCode.No_object_image;
                }

                break;
            }
            return(b);
        }
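A caller of Detection would typically check the out error code before using the returned image, since b can come back null (for example when GV.imgOriginal is not set). The snippet below is a hypothetical usage sketch: the containing class name ImageProcessing and the file names are placeholders, and only the Emgu CV types plus the enum members already visible above (DetectionType.Feature, ErrorCode.Normal) are assumed:

            // Hypothetical caller; ImageProcessing and the file names are placeholders.
            var input = new Image<Bgr, byte>("input.png");
            Image<Bgr, byte> result = ImageProcessing.Detection(input, DetectionType.Feature, out ErrorCode err);
            if (err != ErrorCode.Normal || result == null)
            {
                // The feature/object search failed or no object image was loaded.
                Console.WriteLine($"Detection failed: {err}");
            }
            else
            {
                result.Save("output.png");   // Image<TColor, TDepth>.Save writes the result to disk
            }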