Example #1
        public void FindBoundPolygon()
        {
            // find convex hull
            PointF[] ps = new PointF[this.slicePoints2d.Count];
            for (int i = 0; i < this.slicePoints2d.Count; i++)
            {
                PointF p = new PointF((float)this.slicePoints2d[i].x, (float)this.slicePoints2d[i].y);
                ps[i] = p;
            }
            PointF[] hull = CvInvoke.ConvexHull(ps);

            // find boundary polygon
            VectorOfPointF hull2 = new VectorOfPointF();

            hull2.Push(hull);
            VectorOfPointF poly = new VectorOfPointF();

            // When inferring the number of polygon edges, the 3rd parameter (epsilon) can be swept over [0.0005, 0.0015] and the best result chosen (how to define "best" is an open question).
            CvInvoke.ApproxPolyDP(hull2, poly, 0.0003, true);
            for (int i = 0; i < poly.Size; i++)
            {
                this.cornerPoints2d.Add(new MyVector2(poly[i].X, poly[i].Y));
            }

            // unproject to 3d
            foreach (MyVector2 corner2d in this.cornerPoints2d)
            {
                MyVector3 corner3d = frame.GetPointSpaceCoord(new MyVector3(corner2d, 0.0));
                this.cornerPoints3d.Add(corner3d);
            }
        }
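
Example #1 feeds the convex hull into ApproxPolyDP with a fixed epsilon of 0.0003. For comparison, below is a minimal, self-contained sketch of the same hull-then-approximate pattern that instead scales epsilon with the hull perimeter; the helper name ApproximateHull and the 1% default are assumptions for illustration, not part of the original code (requires Emgu.CV, Emgu.CV.Util and System.Drawing).

        // Hedged sketch: convex hull followed by polygon approximation, with
        // epsilon taken as a fraction of the hull perimeter (an assumed
        // heuristic, not the original author's fixed 0.0003).
        public static PointF[] ApproximateHull(PointF[] points, double epsilonFraction = 0.01)
        {
            PointF[] hull = CvInvoke.ConvexHull(points);
            using (VectorOfPointF hullVec = new VectorOfPointF(hull))
            using (VectorOfPointF poly = new VectorOfPointF())
            {
                double perimeter = CvInvoke.ArcLength(hullVec, true);
                CvInvoke.ApproxPolyDP(hullVec, poly, epsilonFraction * perimeter, true);
                return poly.ToArray();
            }
        }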
Example #2
        private void btStep3_Click(object sender, EventArgs e)
        {
            Mat paper = new Mat();

            Image <Gray, Byte> input = inputMat.ToImage <Gray, Byte>();

            cropped = new Mat();
            VectorOfPointF coners = new VectorOfPointF();

            //List<Point> contourPoints;

            ////find bounding contour
            //contourPoints = docContours.ToArrayOfArray()
            //    .Where(group => group.Length == docContours.ToArrayOfArray().Max(points => points.Length))
            //    .SingleOrDefault().ToList();



            Rectangle rect = CvInvoke.BoundingRectangle(docContours[0]);
            Mat       quad = new Mat();

            quad.Create(answerSheetRealSize.Height, answerSheetRealSize.Width, DepthType.Cv8U, 0);
            VectorOfPointF quadPts = new VectorOfPointF();

            quadPts.Push(new PointF[] { new PointF(0, 0), new PointF(quad.Cols, 0), new PointF(quad.Cols, quad.Rows), new PointF(0, quad.Rows) });

            Mat transmat = CvInvoke.GetPerspectiveTransform(docConers, quadPts);

            CvInvoke.WarpPerspective(grayInput, cropped, transmat, quad.Size);
            imageResult.Image = cropped;
            //input.DrawPolyline(contourPoints.ToArray<Point>(), true, new Gray(0), 10);
            //imageResult.Image = input;
        }
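
Example #2 rectifies the detected document by mapping its four corners (docConers, found in Example #13) onto an upright rectangle with GetPerspectiveTransform and WarpPerspective. A minimal standalone sketch of that step follows; the helper name WarpQuadToRectangle and the corner ordering (top-left, top-right, bottom-right, bottom-left) are assumptions, not the original btStep3_Click code.

        // Hedged sketch: warp a quadrilateral region, given by its four corners
        // in the assumed order top-left, top-right, bottom-right, bottom-left,
        // onto an axis-aligned rectangle of the requested size.
        public static Mat WarpQuadToRectangle(Mat source, PointF[] quadCorners, Size targetSize)
        {
            PointF[] targetCorners =
            {
                new PointF(0, 0),
                new PointF(targetSize.Width, 0),
                new PointF(targetSize.Width, targetSize.Height),
                new PointF(0, targetSize.Height)
            };

            Mat warped = new Mat();
            using (Mat transform = CvInvoke.GetPerspectiveTransform(quadCorners, targetCorners))
            {
                CvInvoke.WarpPerspective(source, warped, transform, targetSize);
            }
            return warped;
        }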
Example #3
        /// <summary>
        /// Return a contour that is translated.
        /// </summary>
        /// <param name="contourIn">Contour that should be translated</param>
        /// <param name="offset_x">X translation</param>
        /// <param name="offset_y">Y translation</param>
        /// <returns>Translated contour</returns>
        public static VectorOfPointF TranslateContour(VectorOfPointF contourIn, int offset_x, int offset_y)
        {
            VectorOfPointF ret_contour = new VectorOfPointF();

            for (int i = 0; i < contourIn.Size; i++)
            {
                ret_contour.Push(new PointF((float)(contourIn[i].X + offset_x + 0.5), (float)(contourIn[i].Y + offset_y + 0.5)));
            }
            return(ret_contour);
        }
        public VectorOfPoint GetTrainContour()
        {
            if (_trainContour.Size == 0)
            {
                VectorOfPointF corners = new VectorOfPointF();
                corners.Push(new PointF[] { new PointF(0.0f, 0.0f) });
                corners.Push(new PointF[] { new PointF(_referenceTrainImage.Cols, 0.0f) });
                corners.Push(new PointF[] { new PointF(_referenceTrainImage.Cols, _referenceTrainImage.Rows) });
                corners.Push(new PointF[] { new PointF(0.0f, _referenceTrainImage.Rows) });

                VectorOfPointF transformedCorners = new VectorOfPointF();
                CvInvoke.PerspectiveTransform(corners, transformedCorners, _homography);

                for (int i = 0; i < transformedCorners.Size; ++i)
                {
                    _trainContour.Push(new Point[] { new Point((int)transformedCorners[i].X, (int)transformedCorners[i].Y) });
                }
            }
            return(_trainContour);
        }
Example #5
        private VectorOfPointF RemoveUnusedLine(PointF[] linesArray)
        {
            VectorOfPointF lines = new VectorOfPointF();

            for (int i = 0; i < linesArray.Length; i++)
            {
                if (linesArray[i].X != 0 || linesArray[i].Y != -100)
                {
                    lines.Push(new PointF[] { linesArray[i] });
                }
            }
            return(lines);
        }
Example #6
        VectorOfPointF CreateGrid(Image <Bgr, Byte> image)
        {
            int            s  = 25;
            VectorOfPointF vf = new VectorOfPointF();

            for (int i = 0; i < image.Size.Width / s; i++)
            {
                for (int j = 0; j < image.Size.Height / s; j++)
                {
                    vf.Push(new PointF[] { new PointF(i * s, j * s) });
                }
            }

            return(vf);
        }
Example #7
        public void CalculateNewPPositionReverseAffine(Mat transformationMat)
        {
            var inverseTransformationMat = new Mat();

            CvInvoke.InvertAffineTransform(transformationMat, inverseTransformationMat);
            var ogPoints  = new VectorOfPointF();
            var newPoints = new VectorOfPointF();

            ogPoints.Push(new [] { this.PointBBoxA, this.PointBBoxB, this.PointP });
            CvInvoke.Transform(ogPoints, newPoints, inverseTransformationMat);
            var newPointsArray = newPoints.ToArray();

            this.TransformedPointBBoxA = newPointsArray[0];
            this.TransformedPointBBoxB = newPointsArray[1];
            this.TransformedPointP     = newPointsArray[2];
        }
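
CalculateNewPPositionReverseAffine above inverts a 2x3 affine matrix and maps three stored points through it. A hedged, reusable sketch of the same two calls is shown here; the helper name MapPointsThroughInverseAffine is hypothetical.

        // Hedged sketch: map points from a transformed image back into the
        // original image by inverting the 2x3 affine matrix and applying
        // cv::transform, mirroring the calls used in the method above.
        public static PointF[] MapPointsThroughInverseAffine(Mat affine2x3, params PointF[] points)
        {
            using (Mat inverse = new Mat())
            using (VectorOfPointF src = new VectorOfPointF(points))
            using (VectorOfPointF dst = new VectorOfPointF())
            {
                CvInvoke.InvertAffineTransform(affine2x3, inverse);
                CvInvoke.Transform(src, dst, inverse);
                return dst.ToArray();
            }
        }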
        public static double ValidateCharuco(int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary, Size imageSize, VectorOfInt charucoIds, VectorOfPointF charucoCorners, VectorOfInt markerCounterPerFrame, bool fisheye, Func <byte[], byte[]> GetRemoteChessboardCorner, Mat cameraMatrix, Mat distCoeffs)
        {
            VectorOfVectorOfPoint3D32F processedObjectPoints = new VectorOfVectorOfPoint3D32F();
            VectorOfVectorOfPointF     processedImagePoints  = new VectorOfVectorOfPointF();
            VectorOfPoint3D32F         rvecs = new VectorOfPoint3D32F();
            VectorOfPoint3D32F         tvecs = new VectorOfPoint3D32F();

            int k = 0;

            for (int i = 0; i < markerCounterPerFrame.Size; i++)
            {
                int                nMarkersInThisFrame       = markerCounterPerFrame[i];
                VectorOfPointF     currentImgPoints          = new VectorOfPointF();
                VectorOfPointF     currentImgPointsUndistort = new VectorOfPointF();
                VectorOfInt        currentIds       = new VectorOfInt();
                VectorOfPoint3D32F currentObjPoints = new VectorOfPoint3D32F();
                Mat                tvec             = new Mat();
                Mat                rvec             = new Mat();

                for (int j = 0; j < nMarkersInThisFrame; j++)
                {
                    currentImgPoints.Push(new PointF[] { charucoCorners[k] });
                    currentIds.Push(new int[] { charucoIds[k] });
                    currentObjPoints.Push(new MCvPoint3D32f[] { GetChessboardCorner(squaresX, squaresY, squareLength, markerLength, charucoIds[k], dictionary, GetRemoteChessboardCorner) });
                    k++;
                }

                Mat distCoeffsNew = new Mat(1, 4, DepthType.Cv64F, 1);
                distCoeffsNew.SetValue(0, 0, 0);
                distCoeffsNew.SetValue(0, 1, 0);
                distCoeffsNew.SetValue(0, 2, 0);
                distCoeffsNew.SetValue(0, 3, 0);

                Fisheye.UndistorPoints(currentImgPoints, currentImgPointsUndistort, cameraMatrix, distCoeffs, Mat.Eye(3, 3, DepthType.Cv64F, 1), Mat.Eye(3, 3, DepthType.Cv64F, 1));
                if (ArucoInvoke.EstimatePoseCharucoBoard(currentImgPointsUndistort, currentIds, CreateBoard(squaresX, squaresY, squareLength, markerLength, new Dictionary(dictionary)), Mat.Eye(3, 3, DepthType.Cv64F, 1), distCoeffsNew, rvec, tvec))
                {
                    rvecs.Push(new MCvPoint3D32f[] { new MCvPoint3D32f((float)rvec.GetValue(0, 0), (float)rvec.GetValue(1, 0), (float)rvec.GetValue(2, 0)) });
                    tvecs.Push(new MCvPoint3D32f[] { new MCvPoint3D32f((float)tvec.GetValue(0, 0), (float)tvec.GetValue(1, 0), (float)tvec.GetValue(2, 0)) });

                    processedImagePoints.Push(currentImgPoints);
                    processedObjectPoints.Push(currentObjPoints);
                }
            }

            return(Validate(processedObjectPoints, processedImagePoints, cameraMatrix, distCoeffs, rvecs, tvecs, fisheye));
        }
        public static (Mat cameraMatrix, Mat distCoeffs, double rms) CalibrateCharuco(int squaresX, int squaresY, float squareLength, float markerLength, PredefinedDictionaryName dictionary, Size imageSize, VectorOfInt charucoIds, VectorOfPointF charucoCorners, VectorOfInt markerCounterPerFrame, bool fisheye, Func <byte[], byte[]> GetRemoteChessboardCorner)
        {
            Mat    cameraMatrix = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            Mat    distCoeffs   = new Mat(1, 4, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
            double rms          = 0.0;

            VectorOfVectorOfPoint3D32F processedObjectPoints = new VectorOfVectorOfPoint3D32F();
            VectorOfVectorOfPointF     processedImagePoints  = new VectorOfVectorOfPointF();

            int k = 0;

            for (int i = 0; i < markerCounterPerFrame.Size; i++)
            {
                int                nMarkersInThisFrame = markerCounterPerFrame[i];
                VectorOfPointF     currentImgPoints    = new VectorOfPointF();
                VectorOfPoint3D32F currentObjPoints    = new VectorOfPoint3D32F();

                for (int j = 0; j < nMarkersInThisFrame; j++)
                {
                    currentImgPoints.Push(new PointF[] { charucoCorners[k] });
                    currentObjPoints.Push(new MCvPoint3D32f[] { GetChessboardCorner(squaresX, squaresY, squareLength, markerLength, charucoIds[k], dictionary, GetRemoteChessboardCorner) });
                    k++;
                }

                processedImagePoints.Push(currentImgPoints);
                processedObjectPoints.Push(currentObjPoints);
            }

            VectorOfPoint3D32F rvecs = new VectorOfPoint3D32F();
            VectorOfPoint3D32F tvecs = new VectorOfPoint3D32F();

            if (fisheye)
            {
                Fisheye.Calibrate(processedObjectPoints, processedImagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, Fisheye.CalibrationFlag.FixSkew | Fisheye.CalibrationFlag.RecomputeExtrinsic, new MCvTermCriteria(400, double.Epsilon));
            }
            else
            {
                CvInvoke.CalibrateCamera(processedObjectPoints, processedImagePoints, imageSize, cameraMatrix, distCoeffs, new Mat(), new Mat(), CalibType.FixK3, new MCvTermCriteria(30, 1e-4));
            }

            rms = Validate(processedObjectPoints, processedImagePoints, cameraMatrix, distCoeffs, rvecs, tvecs, fisheye);

            return(cameraMatrix, distCoeffs, rms);
        }
Example #10
        public VectorOfPointF AddCornerPoints(VectorOfPointF points, Mat img)
        {
            if (points.Size < 76)
            {
                int width  = img.Width;
                int height = img.Height;

                // top left
                PointF[] p0 = { new PointF(0, 0) };
                points.Push(p0);

                // top center
                PointF[] p1 = { new PointF((width / 2) - 1, 0) };
                points.Push(p1);

                // top right
                PointF[] p2 = { new PointF(width - 1, 0) };
                points.Push(p2);

                // center right
                PointF[] p3 = { new PointF(width - 1, (height / 2) - 1) };
                points.Push(p3);

                // bottom right
                PointF[] p4 = { new PointF(width - 1, height - 1) };
                points.Push(p4);

                // bottom center
                PointF[] p5 = { new PointF((width / 2) - 1, height - 1) };
                points.Push(p5);

                // bottom left
                PointF[] p6 = { new PointF(0, height - 1) };
                points.Push(p6);

                // center left
                PointF[] p7 = { new PointF(0, (height / 2) - 1) };
                points.Push(p7);
            }
            return(points);
        }
Example #11
        private void CreateDelaunay(ref Mat img, ref Subdiv2D subdiv, ref VectorOfPointF points,
                                    bool drawAnimated, ref VectorOfVectorOfInt triangleIndexes)
        {
            PointF[] pointsArr = points.ToArray();
            foreach (PointF p in pointsArr)
            {
                subdiv.Insert(p);
                if (drawAnimated)
                {
                    Mat imgCopy = img.Clone();
                    DrawDelaunay(ref imgCopy, ref subdiv, new MCvScalar(255, 255, 255));
                    CvInvoke.Imshow("Delaunay Triangulation", imgCopy);
                }
            }

            // Unfortunately we don't get the triangles by their original point indexes.
            // We only get them with their vertex coordinates.
            // So we have to map them again to get the triangles with their point indexes.

            Size      size = img.Size;
            Rectangle rect = new Rectangle(0, 0, size.Width, size.Height);

            VectorOfInt ind = new VectorOfInt();

            int[]         indArr       = new int[3];
            Triangle2DF[] triangleList = subdiv.GetDelaunayTriangles();
            for (int i = 0; i < triangleList.Length; i++)
            {
                Triangle2DF t = triangleList[i];

                PointF ptzero = new PointF {
                    X = t.V0.X, Y = t.V0.Y
                };
                PointF[] PZero = new PointF[] { ptzero };

                PointF ptone = new PointF {
                    X = t.V1.X, Y = t.V1.Y
                };
                PointF[] POne = new PointF[] { ptone };

                PointF pttwo = new PointF {
                    X = t.V2.X, Y = t.V2.Y
                };
                PointF[] PTwo = new PointF[] { pttwo };

                VectorOfPointF pt = new VectorOfPointF();

                pt.Push(PZero);

                pt.Push(POne);
                pt.Push(PTwo);

                if (rect.Contains(new Point((int)pt[0].X, (int)pt[0].Y)) &&
                    rect.Contains(new Point((int)pt[1].X, (int)pt[1].Y)) &&
                    rect.Contains(new Point((int)pt[2].X, (int)pt[2].Y)))
                {
                    for (int j = 0; j < 3; j++)
                    {
                        for (int k = 0; k < points.Size; k++)
                        {
                            if (Math.Abs(pt[j].X - points[k].X) < 1.0 &&
                                Math.Abs(pt[j].Y - points[k].Y) < 1)
                            {
                                indArr[j] = k;
                            }
                        }
                    }
                }
                ind = new VectorOfInt(indArr);
                triangleIndexes.Push(ind);
            }
        }
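
CreateDelaunay above inserts every point into a Subdiv2D and then maps the returned triangle vertices back to point indexes. The following sketch isolates just the triangulation part; the helper name Triangulate is an assumption, and the index-mapping step is deliberately left out (requires Emgu.CV, Emgu.CV.Structure and System.Drawing).

        // Hedged sketch: build a Subdiv2D over an image-sized rectangle, insert
        // the points, and return the Delaunay triangles by vertex coordinates.
        public static Triangle2DF[] Triangulate(Size imageSize, PointF[] points)
        {
            Rectangle bounds = new Rectangle(0, 0, imageSize.Width, imageSize.Height);
            using (Subdiv2D subdiv = new Subdiv2D(bounds))
            {
                foreach (PointF p in points)
                {
                    subdiv.Insert(p);
                }
                return subdiv.GetDelaunayTriangles();
            }
        }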
        public override Bitmap GenerateSolutionImage(Matrix <int> solutionLocations, int solutionID, List <Piece> pieces)
        {
            float out_image_width = 0, out_image_height = 0;

            for (int i = 0; i < solutionLocations.Size.Width; i++)           // Calculate output image size
            {
                for (int j = 0; j < solutionLocations.Size.Height; j++)
                {
                    int piece_number = solutionLocations[j, i];
                    if (piece_number == -1)
                    {
                        continue;
                    }

                    float piece_size_x = (float)Utils.Distance(pieces[piece_number].GetCorner(0), pieces[piece_number].GetCorner(3));
                    float piece_size_y = (float)Utils.Distance(pieces[piece_number].GetCorner(0), pieces[piece_number].GetCorner(1));

                    out_image_width  += piece_size_x;
                    out_image_height += piece_size_y;
                }
            }
            out_image_width  = (out_image_width / solutionLocations.Size.Height) * 1.5f + BorderAroundSolutionImage;
            out_image_height = (out_image_height / solutionLocations.Size.Width) * 1.5f + BorderAroundSolutionImage;

            // Use get affine to map points...
            Image <Rgb, byte> final_out_image = new Image <Rgb, byte>((int)out_image_width, (int)out_image_height);

            PointF[,] points = new PointF[solutionLocations.Size.Width + 1, solutionLocations.Size.Height + 1];
            bool failed = false;

            for (int i = 0; i < solutionLocations.Size.Width; i++)
            {
                for (int j = 0; j < solutionLocations.Size.Height; j++)
                {
                    int piece_number = solutionLocations[j, i];

                    if (piece_number == -1)
                    {
                        failed = true;
                        break;
                    }
                    float          piece_size_x = (float)Utils.Distance(pieces[piece_number].GetCorner(0), pieces[piece_number].GetCorner(3));
                    float          piece_size_y = (float)Utils.Distance(pieces[piece_number].GetCorner(0), pieces[piece_number].GetCorner(1));
                    VectorOfPointF src          = new VectorOfPointF();
                    VectorOfPointF dst          = new VectorOfPointF();

                    if (i == 0 && j == 0)
                    {
                        points[i, j] = new PointF(BorderAroundSolutionImage, BorderAroundSolutionImage);
                    }
                    if (i == 0)
                    {
                        points[i, j + 1] = new PointF(BorderAroundSolutionImage, points[i, j].Y + BorderAroundSolutionImage + piece_size_y); //new PointF(points[i, j].X + border + x_dist, border);
                    }
                    if (j == 0)
                    {
                        points[i + 1, j] = new PointF(points[i, j].X + BorderAroundSolutionImage + piece_size_x, BorderAroundSolutionImage); //new PointF(border, points[i, j].Y + border + y_dist);
                    }

                    dst.Push(points[i, j]);
                    //dst.Push(points[i + 1, j]);
                    //dst.Push(points[i, j + 1]);
                    dst.Push(points[i, j + 1]);
                    dst.Push(points[i + 1, j]);
                    src.Push(pieces[piece_number].GetCorner(0));
                    src.Push(pieces[piece_number].GetCorner(1));
                    src.Push(pieces[piece_number].GetCorner(3));

                    //true means use affine transform
                    Mat a_trans_mat = CvInvoke.EstimateRigidTransform(src, dst, true);

                    Matrix <double> A = new Matrix <double>(a_trans_mat.Rows, a_trans_mat.Cols);
                    a_trans_mat.CopyTo(A);

                    PointF l_r_c = pieces[piece_number].GetCorner(2);       //Lower right corner of each piece

                    //Doing my own matrix multiplication
                    points[i + 1, j + 1] = new PointF((float)(A[0, 0] * l_r_c.X + A[0, 1] * l_r_c.Y + A[0, 2]), (float)(A[1, 0] * l_r_c.X + A[1, 1] * l_r_c.Y + A[1, 2]));

                    Mat layer      = new Mat();
                    Mat layer_mask = new Mat();

                    CvInvoke.WarpAffine(new Image <Rgb, byte>(pieces[piece_number].PieceImgColor.Bmp), layer, a_trans_mat, new Size((int)out_image_width, (int)out_image_height), Inter.Linear, Warp.Default, BorderType.Transparent);
                    CvInvoke.WarpAffine(new Image <Gray, byte>(pieces[piece_number].PieceImgBw.Bmp), layer_mask, a_trans_mat, new Size((int)out_image_width, (int)out_image_height), Inter.Nearest, Warp.Default, BorderType.Transparent);

                    layer.CopyTo(final_out_image, layer_mask);
                }

                if (failed)
                {
                    PluginFactory.LogHandle.Report(new LogBox.LogEvents.LogEventError("Failed to generate solution " + solutionID + " image. Only partial image generated."));
                    break;
                }
            }

            return(final_out_image.Clone().Bitmap);
        }
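
GenerateSolutionImage places each piece by estimating a full affine transform from three corner correspondences and warping the piece with it. A minimal sketch of that placement step, detached from the puzzle bookkeeping, is given below; the helper name WarpByThreePointPairs is hypothetical.

        // Hedged sketch: estimate a full affine transform from corresponding
        // point pairs and warp an image with it, mirroring the
        // EstimateRigidTransform / WarpAffine calls used per piece above.
        public static Mat WarpByThreePointPairs(Mat source, PointF[] srcPoints, PointF[] dstPoints, Size outputSize)
        {
            using (VectorOfPointF src = new VectorOfPointF(srcPoints))
            using (VectorOfPointF dst = new VectorOfPointF(dstPoints))
            using (Mat affine = CvInvoke.EstimateRigidTransform(src, dst, true))
            {
                Mat warped = new Mat();
                CvInvoke.WarpAffine(source, warped, affine, outputSize, Inter.Linear, Warp.Default, BorderType.Transparent);
                return warped;
            }
        }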
Example #13
        private void btStep2_Click(object sender, EventArgs e)
        {
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

            Mat hierarchy = new Mat();

            Image <Bgr, Byte> input = inputMat.ToImage <Bgr, Byte>();

            CvInvoke.FindContours(edgeInput.Clone(), contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);

            docContours = new VectorOfVectorOfPoint();

            docConers = new VectorOfPointF();

            for (int i = 0; i < contours.Size; i++)
            {
                double        peri = CvInvoke.ArcLength(contours[i], true);
                VectorOfPoint poly = new VectorOfPoint();
                CvInvoke.ApproxPolyDP(contours[i], poly, 0.02 * peri, true);
                Rectangle docBounding = CvInvoke.BoundingRectangle(poly);
                if (poly.Size == 4 && docBounding.Width > 500 && docBounding.Height > 300)
                {
                    PointF[] listConer = new PointF[4];

                    Point[] ps = poly.ToArray().OrderBy(point => point.X).ToArray <Point>();
                    if (ps[0].Y > ps[1].Y)
                    {
                        listConer[0] = ps[1];
                        listConer[3] = ps[0];
                    }
                    else
                    {
                        listConer[3] = ps[1];
                        listConer[0] = ps[0];
                    }

                    if (ps[2].Y < ps[3].Y)
                    {
                        listConer[1] = ps[2];
                        listConer[2] = ps[3];
                    }
                    else
                    {
                        listConer[2] = ps[2];
                        listConer[1] = ps[3];
                    }

                    listConer[0].X += cutOffset;
                    listConer[0].Y += cutOffset;

                    listConer[1].X -= cutOffset;
                    listConer[1].Y += cutOffset;

                    listConer[2].X -= cutOffset;
                    listConer[2].Y -= cutOffset;

                    listConer[3].X += cutOffset;
                    listConer[3].Y -= cutOffset;

                    docConers.Push(listConer);
                    MessageBox.Show("Document was discovered");
                    break;
                }
            }

            //CvInvoke.DrawContours(inputMat, contours, -1, new MCvScalar(255, 0, 0),3);

            imageResult.Image = inputMat;
            //if (contours.Size == 4)
            docContours = contours;
        }
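
btStep2_Click orders the four approximated corners by sorting on X and comparing Y values pairwise. As an illustrative alternative (not the original author's method), the sketch below orders quad corners with the common sum/difference heuristic; the helper name OrderCorners is hypothetical and it needs System.Linq.

        // Hedged alternative sketch: order four corners as top-left, top-right,
        // bottom-right, bottom-left using x+y and y-x extrema (assumed
        // heuristic, not the ordering used in btStep2_Click above).
        public static PointF[] OrderCorners(Point[] quad)
        {
            PointF[] ordered = new PointF[4];
            ordered[0] = quad.OrderBy(p => p.X + p.Y).First(); // top-left: smallest x + y
            ordered[2] = quad.OrderBy(p => p.X + p.Y).Last();  // bottom-right: largest x + y
            ordered[1] = quad.OrderBy(p => p.Y - p.X).First(); // top-right: smallest y - x
            ordered[3] = quad.OrderBy(p => p.Y - p.X).Last();  // bottom-left: largest y - x
            return ordered;
        }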
Example #14
        public static void FitEdge(Image <Gray, byte> inputImage, int startRow, int endRow, int startCol, int endCol, bool isTopBottom, out WaferEdgeFit edge)
        {
            edge = new WaferEdgeFit();

            Rectangle origRoi = inputImage.ROI;
            Rectangle sideRoi = new Rectangle(startCol, startRow, endCol - startCol, endRow - startRow);

            bool startFromRight = (!isTopBottom && startCol > origRoi.Width / 2) || (isTopBottom && startRow > origRoi.Height / 2);

            inputImage.ROI = sideRoi;

            int workingWidth  = isTopBottom ? sideRoi.Height : sideRoi.Width;
            int workingHeight = isTopBottom ? sideRoi.Width : sideRoi.Height;

            using (Image <Gray, float> workImage = new Image <Gray, float>(workingWidth, workingHeight))
            {
                double gradientLimit;

                using (Image <Gray, float> sobelImage =
                           isTopBottom ? inputImage.Sobel(0, 1, 3) : inputImage.Sobel(1, 0, 3))
                {
                    using (Image <Gray, float> nullImage = new Image <Gray, float>(sobelImage.Size))
                    {
                        CvInvoke.AbsDiff(sobelImage, nullImage, sobelImage);
                    }

                    using (Image <Gray, byte> mask = new Image <Gray, byte>(sideRoi.Width, sideRoi.Height))
                    {
                        CvInvoke.Threshold(inputImage, mask, 0, 1, ThresholdType.Binary);
                        MCvScalar gradientMean = new MCvScalar();
                        MCvScalar gradientStd  = new MCvScalar();
                        CvInvoke.MeanStdDev(sobelImage, ref gradientMean, ref gradientStd, mask);
                        double nSigma = 5;
                        gradientLimit = gradientMean.V0 + nSigma * gradientStd.V0;
                    }

                    if (isTopBottom)
                    {
                        CvInvoke.Transpose(sobelImage, workImage);
                    }
                    else
                    {
                        sobelImage.CopyTo(workImage);
                    }
                }

                inputImage.ROI = origRoi;

                List <PointF> edgePoints = new List <PointF>();
                List <float>  fullWidthHalfMaximumVals = new List <float>();
                var           sobelData = workImage.Data;
                int           stride    = 1;

                for (int r = 0; r < workingHeight; r += stride)
                {
                    int approxEdgeCol = 0;
                    if (!startFromRight)
                    {
                        for (int c = 0; c < workingWidth; c++)
                        {
                            var currentValue = sobelData[r, c, 0];
                            if (currentValue > gradientLimit)
                            {
                                approxEdgeCol = c;
                                break;
                            }
                        }
                    }
                    else
                    {
                        for (int c = workingWidth - 1; c > 0; c--)
                        {
                            var currentValue = sobelData[r, c, 0];
                            if (currentValue > gradientLimit)
                            {
                                approxEdgeCol = c;
                                break;
                            }
                        }
                    }

                    int   meanEdgeCol     = 0;
                    float maxValue        = 0;
                    var   currentStartCol = Math.Max(approxEdgeCol - 5, 1);
                    var   currentEndCol   = Math.Min(approxEdgeCol + 5 + 1, workingWidth - 1);
                    for (int c = currentStartCol; c < currentEndCol; c++)
                    {
                        if (sobelData[r, c, 0] > maxValue)
                        {
                            maxValue    = sobelData[r, c, 0];
                            meanEdgeCol = c;
                        }
                    }

                    if (!(maxValue > 0))
                    {
                        continue;
                    }

                    float halfMaxLeftValue  = maxValue;
                    float halfMaxRightValue = maxValue;
                    int   halfMaxLeftCol    = meanEdgeCol;
                    int   halfMaxRightCol   = meanEdgeCol;

                    while (halfMaxLeftValue > maxValue / 2 && halfMaxLeftCol >= 0)
                    {
                        halfMaxLeftCol--;
                        halfMaxLeftValue = sobelData[r, halfMaxLeftCol, 0];
                    }
                    while (halfMaxRightValue > maxValue / 2 && halfMaxRightCol < workingWidth)
                    {
                        halfMaxRightCol++;
                        halfMaxRightValue = sobelData[r, halfMaxRightCol, 0];
                    }

                    float fwhm = halfMaxRightCol - halfMaxLeftCol;

                    //Interpolation
                    float dPixel = (maxValue / 2 - halfMaxLeftValue) /
                                   (sobelData[r, halfMaxLeftCol + 1, 0] - halfMaxLeftValue);
                    fwhm  -= dPixel;
                    dPixel = (maxValue / 2 - halfMaxRightValue) /
                             (sobelData[r, halfMaxRightCol - 1, 0] - halfMaxRightValue);
                    fwhm -= dPixel;

                    fullWidthHalfMaximumVals.Add(fwhm);


                    edgePoints.Add(isTopBottom
                        ? new PointF(r + sideRoi.X, meanEdgeCol + sideRoi.Y)
                        : new PointF(r + sideRoi.Y, meanEdgeCol + sideRoi.X));
                }

                VectorOfPointF yvector    = new VectorOfPointF();
                VectorOfFloat  parameters = new VectorOfFloat();
                yvector.Push(edgePoints.ToArray());
                CvInvoke.FitLine(yvector, parameters, DistType.L12, 0, 0.01, 0.01);

                float vx = parameters[0];
                float vy = parameters[1];
                float x0 = parameters[2];
                float y0 = parameters[3];

                edge.FitParams = parameters;
                edge.Slope     = vy / vx;
                edge.Intercept = y0 - edge.Slope * x0;

                fullWidthHalfMaximumVals.Sort();
                int length = fullWidthHalfMaximumVals.Count;

                if (length % 2 == 0)
                {
                    edge.LineSpread = (fullWidthHalfMaximumVals[length / 2 - 1] + fullWidthHalfMaximumVals[length / 2]) / 2;
                }
                else
                {
                    edge.LineSpread = fullWidthHalfMaximumVals[length / 2];
                }

                if (!isTopBottom)
                {
                    edge.InvertedRepresentation = true;
                }
            }
        }
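
FitEdge converts the (vx, vy, x0, y0) output of CvInvoke.FitLine into slope/intercept form for WaferEdgeFit. The standalone sketch below shows just that conversion; the helper name FitLineSlopeIntercept is an assumption, and it presumes the fitted line is not vertical (vx != 0).

        // Hedged sketch: fit a line to edge points and return slope/intercept,
        // mirroring the FitLine handling in FitEdge above.
        public static (float slope, float intercept) FitLineSlopeIntercept(PointF[] edgePoints)
        {
            using (VectorOfPointF input = new VectorOfPointF(edgePoints))
            using (VectorOfFloat line = new VectorOfFloat())
            {
                CvInvoke.FitLine(input, line, DistType.L12, 0, 0.01, 0.01);
                float vx = line[0], vy = line[1], x0 = line[2], y0 = line[3];
                float slope = vy / vx;
                return (slope, y0 - slope * x0);
            }
        }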
        private Task DoValidate(object o)
        {
            return(Task.Factory.StartNew(() =>
            {
                Parent.SyncContext.Post(async c =>
                {
                    VectorOfInt allIds = new VectorOfInt();
                    VectorOfVectorOfPointF allCorners = new VectorOfVectorOfPointF();
                    VectorOfInt allCharucoIds = new VectorOfInt();
                    VectorOfPointF allCharucoCorners = new VectorOfPointF();
                    VectorOfInt markerCounterPerFrame = new VectorOfInt();
                    VectorOfInt charucoCounterPerFrame = new VectorOfInt();
                    int squaresX = 0;
                    int squaresY = 0;
                    float squareLength = 0f;
                    float markerLength = 0f;
                    PredefinedDictionaryName dictionary = PredefinedDictionaryName.Dict4X4_50;
                    System.Drawing.Size size = new System.Drawing.Size();

                    bool fisheye = false;

                    Parent.SyncContext.Send(async d =>
                    {
                        fisheye = Parent.CameraViewModel.FishEyeCalibration;

                        squaresX = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.SquaresX;
                        squaresY = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.SquaresY;
                        squareLength = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.SquareLength;
                        markerLength = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.MarkerLength;
                        dictionary = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.Dictionary;
                        size = new System.Drawing.Size(Parent.CameraViewModel.ImageWidth, Parent.CameraViewModel.ImageHeight);

                        foreach (ChArUcoImageContainer image in Images)
                        {
                            if (image.MarkerCorners != null && image.CharucoCorners.Size > 4)
                            {
                                allIds.Push(image.MarkerIds);
                                allCorners.Push(image.MarkerCorners);
                                allCharucoIds.Push(image.CharucoIds);
                                allCharucoCorners.Push(image.CharucoCorners);
                                markerCounterPerFrame.Push(new int[] { image.MarkerCorners.Size });
                                charucoCounterPerFrame.Push(new int[] { image.CharucoCorners.Size });
                            }
                        }
                    }, null);

                    if (markerCounterPerFrame.Size > 0)
                    {
                        MetroDialogSettings settings = new MetroDialogSettings()
                        {
                            AnimateShow = false,
                            AnimateHide = false
                        };

                        var controller = await Parent.DialogCoordinator.ShowProgressAsync(Parent, "Please wait...", "Validate parameter now!", settings: Parent.MetroDialogSettings);
                        controller.SetIndeterminate();
                        controller.SetCancelable(false);

                        bool error = false;
                        double rms = 0.0;
                        try
                        {
                            Mat cameraMatrix = new Mat(3, 3, Emgu.CV.CvEnum.DepthType.Cv64F, 1);
                            Mat distCoeffs = new Mat(1, Parent.CameraViewModel.FishEyeCalibration ? 4 : _DistCoeffs.Count, Emgu.CV.CvEnum.DepthType.Cv64F, 1);

                            cameraMatrix.SetValue(0, 0, Fx);
                            cameraMatrix.SetValue(1, 1, Fy);
                            cameraMatrix.SetValue(0, 1, Fx * Alpha);
                            cameraMatrix.SetValue(0, 2, Cx);
                            cameraMatrix.SetValue(1, 2, Cy);
                            cameraMatrix.SetValue(2, 2, 1.0f);

                            for (int i = 0; i < distCoeffs.Cols && (Parent.CameraViewModel.FishEyeCalibration ? i < 4 : true); i++)
                            {
                                distCoeffs.SetValue(0, i, _DistCoeffs[i]);
                            }

                            rms = ChArUcoCalibration.ValidateCharuco(squaresX, squaresY, squareLength, markerLength, dictionary, size, allCharucoIds, allCharucoCorners, charucoCounterPerFrame, fisheye, delegate(byte[] input)
                            {
                                return Parent.IOProxy.GetRemoteChessboardCorner(input);
                            }, cameraMatrix, distCoeffs);
                        }
                        catch (Exception ex)
                        {
                            error = true;
                        }

                        await controller.CloseAsync();
                        if (!error)
                        {
                            var con = await Parent.DialogCoordinator.ShowMessageAsync(Parent, "Result", string.Format("RMS: {0}", rms), MessageDialogStyle.Affirmative, null);
                        }
                        else
                        {
                            await Parent.DialogCoordinator.ShowMessageAsync(Parent, "Error", "Error during validation!");
                        }
                    }
                    else
                    {
                        await Parent.DialogCoordinator.ShowMessageAsync(Parent, "Error", "Not enough valid input frames available!");
                    }
                }, null);
            }));
        }
Example #16
        /// <summary>
        /// Push a single point to a vector of points
        /// </summary>
        /// <param name="vector">Vector to push the point to</param>
        /// <param name="point">Point to push to the vector</param>
        public static void Push(this VectorOfPointF vector, PointF point)
        {
            vector.Push(new PointF[] { point });
        }
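
A small usage note for the extension method above (hypothetical example, assuming the containing static class is in scope): individual corners can be pushed one at a time instead of wrapping each in a single-element PointF[].

        // Hypothetical usage of the Push(PointF) extension defined above.
        public static VectorOfPointF BuildQuad(float width, float height)
        {
            VectorOfPointF quad = new VectorOfPointF();
            quad.Push(new PointF(0, 0));
            quad.Push(new PointF(width - 1, 0));
            quad.Push(new PointF(width - 1, height - 1));
            quad.Push(new PointF(0, height - 1));
            return quad;
        }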
Example #17
        public void MainStuff()
        {
            SRC_Img       = new Image <Gray, byte>(@"C:\Users\Админ\Downloads\image63341262,2002.png");
            Corrected_Img = SRC_Img.Clone();

            //CvInvoke.CLAHE(SRC_Img, 40, new Size(8, 8), Corrected_Img);
            //CvInvoke.FindChessboardCorners(SRC_Img, new Size(8,8), vec);
            #region
            PointF[] corners = new PointF[] { new PointF(100, 196), new PointF(261, 190), new PointF(417, 192), new PointF(584, 201),
                                              new PointF(111, 277), new PointF(284, 287), new PointF(458, 291), new PointF(580, 284),
                                              new PointF(130, 368), new PointF(276, 395), new PointF(429, 391), new PointF(563, 365) };
            #endregion
            VectorOfPointF vec = new VectorOfPointF();
            vec.Push(corners);
            // X: 0 - 480 / 3 ||0 159 329 479
            // Y: 0 - 210 / 2 || 0 104 209

            MCvPoint3D32f[] objCorners = new MCvPoint3D32f[] { new MCvPoint3D32f(0, 0, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, 0, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, 0, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, 0, 0.0f),
                                                               new MCvPoint3D32f(0, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, SRC_Img.Height / 2 - 1, 0.0f),
                                                               new MCvPoint3D32f(0, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(2 * SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f), new MCvPoint3D32f(SRC_Img.Width - 1, SRC_Img.Height - 1, 0.0f) };

            /*
             * for (int i = 0; i < objCorners.Length; i++)
             * {
             * objCorners[i].X += SRC_Img.Width / 2;
             * objCorners[i].Y += SRC_Img.Height / 2;
             * }*/
            //VectorOfPointF objvec = new VectorOfPointF();
            //objvec.Push(objCorners);


            //Corrected_Img = FindTable(SRC_Img);
            Matrix <double> CameraMatrix = new Matrix <double>(3, 3, 1);
            CameraMatrix[0, 0] = 1;
            CameraMatrix[1, 1] = 1;
            CameraMatrix[2, 2] = 1;
            CameraMatrix[0, 2] = 349.417;
            CameraMatrix[1, 2] = 286.417;

            Mat newCameraMatrix = CvInvoke.GetDefaultNewCameraMatrix(CameraMatrix);
            //CvInvoke.Undistort(SRC_Img, Corrected_Img,
            //CvInvoke.FindChessboardCorners(SRC_Img, new System.Drawing.Size(5,5),

            Mat             distCoeffs    = new Mat(1, 5, DepthType.Cv32F, 1);
            Mat             rotCoeffs     = new Mat();
            Mat             translVectors = new Mat();
            MCvTermCriteria TermCriteria  = new MCvTermCriteria(30, 0.1);
            Corrected_Img = SRC_Img.Clone();
            CvInvoke.DrawChessboardCorners(Corrected_Img, new System.Drawing.Size(4, 3), vec, true);
            //CvInvoke.CornerSubPix(SRC_Img, vec, new Size(2, 2), new Size(-1, -1), TermCriteria);
            //CvInvoke.DrawChessboardCorners(SRC_Img, new System.Drawing.Size(4, 3), objvec, true);

            /*
             * try
             * {
             * CvInvoke.Remap(SRC_Img, Corrected_Img, vec, objvec, Inter.Nearest, BorderType.Constant);
             * } catch (Exception ex) { string s = ex.Message; }
             */
            VectorOfPoint3D32F obj3dvec = new VectorOfPoint3D32F();
            obj3dvec.Push(objCorners);

            try
            {
                MCvPoint3D32f[][] corners_object_list = new MCvPoint3D32f[1][];
                PointF[][]        corners_points_list = new PointF[1][];
                corners_object_list[0] = objCorners;
                corners_points_list[0] = corners;
                double r = CvInvoke.CalibrateCamera(obj3dvec,
                                                    vec,
                                                    SRC_Img.Size,
                                                    CameraMatrix,
                                                    distCoeffs,
                                                    rotCoeffs,
                                                    translVectors,
                                                    CalibType.Default,
                                                    TermCriteria);

                //double error = CameraCalibration.CalibrateCamera(corners_object_list, corners_points_list, Gray_Frame.Size, IC, Emgu.CV.CvEnum.CALIB_TYPE.CV_CALIB_RATIONAL_MODEL, out EX_Param);
                r += 0;
                //Matrix<float> dist = new Matrix<float>( new float[] {

                //CvInvoke.Undistort(SRC_Img, Corrected_Img, cameraMatrix, );
            } catch (Exception ex) { }

            IntrinsicCameraParameters IC = new IntrinsicCameraParameters(8);
            Matrix <float>            Map1, Map2;
            IC.InitUndistortMap(SRC_Img.Width, SRC_Img.Height, out Map1, out Map2);
            Image <Gray, Byte> stuff = Undistort(SRC_Img);

            imageBox1.Image = SRC_Img.Resize(imageBox1.Width, imageBox1.Height, Inter.Linear);
            imageBox2.Image = Corrected_Img.Resize(imageBox1.Width, imageBox1.Height, Inter.Linear);
        }
Example #18
        public void SuperR()
        {
            SRC_Img       = new Image <Gray, byte>(@"C:\Users\Админ\Downloads\image63341262,2002.png");
            Corrected_Img = SRC_Img.Clone();

            PointF[] corners = new PointF[] { new PointF(100, 196), new PointF(261, 190), new PointF(417, 192), new PointF(584, 201),
                                              new PointF(111, 277), new PointF(284, 287), new PointF(458, 291), new PointF(580, 284),
                                              new PointF(130, 368), new PointF(276, 395), new PointF(429, 391), new PointF(563, 365) };

            /*MCvPoint3D32f[] objCorners = new MCvPoint3D32f[] { new MCvPoint3D32f( 0, 0, 0.0f),    new MCvPoint3D32f(SRC_Img.Width / 3 - 1, 0, 0.0f),       new MCvPoint3D32f( 2 * SRC_Img.Width / 3 - 1, 0, 0.0f),    new MCvPoint3D32f( SRC_Img.Width - 1, 0, 0.0f),
             *                                  new MCvPoint3D32f( 0, SRC_Img.Height / 2 - 1, 0.0f),  new MCvPoint3D32f(SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f),     new MCvPoint3D32f( 2 * SRC_Img.Width / 3 - 1, SRC_Img.Height / 2 - 1, 0.0f),  new MCvPoint3D32f( SRC_Img.Width - 1, SRC_Img.Height / 2 - 1, 0.0f),
             *                                  new MCvPoint3D32f( 0, SRC_Img.Height - 1, 0.0f),  new MCvPoint3D32f( SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f),    new MCvPoint3D32f( 2 * SRC_Img.Width / 3 - 1, SRC_Img.Height - 1, 0.0f),  new MCvPoint3D32f( SRC_Img.Width - 1, SRC_Img.Height - 1, 0.0f)
             *                            };
             */
            // X: 0 - 480 / 3 ||0 159 329 479
            // Y: 0 - 210 / 2 || 0 104 209

            MCvPoint3D32f[] objCorners = new MCvPoint3D32f[] { new MCvPoint3D32f(0, 0, 0.0f), new MCvPoint3D32f(159, 0, 0.0f), new MCvPoint3D32f(329, 0, 0.0f), new MCvPoint3D32f(479, 0, 0.0f),
                                                               new MCvPoint3D32f(0, 104, 0.0f), new MCvPoint3D32f(159, 104, 0.0f), new MCvPoint3D32f(329, 104, 0.0f), new MCvPoint3D32f(479, 104, 0.0f),
                                                               new MCvPoint3D32f(0, 209, 0.0f), new MCvPoint3D32f(159, 209, 0.0f), new MCvPoint3D32f(329, 209, 0.0f), new MCvPoint3D32f(479, 209, 0.0f) };

            VectorOfPointF veccorners = new VectorOfPointF();

            veccorners.Push(corners);
            VectorOfPoint3D32F vecobjcorners = new VectorOfPoint3D32F();

            vecobjcorners.Push(objCorners);

            MCvTermCriteria TermCriteria = new MCvTermCriteria(30, 0.1);

            CvInvoke.CornerSubPix(SRC_Img, veccorners, new Size(2, 2), new Size(-1, -1), TermCriteria);

            IntrinsicCameraParameters intrisic = new IntrinsicCameraParameters();

            ExtrinsicCameraParameters[] extrinsic;
            intrisic.IntrinsicMatrix = new Matrix <double>(new double[, ] {
                { 1, 0, 349.417 }, { 0, 1, 286.417 }, { 0, 0, 1 }
            });
            try
            {
                Matrix <float> distortCoeffs   = new Matrix <float>(1, 4);
                Mat            rotationVectors = new Mat();
                //rotationVectors[0] = new Mat(3,1, DepthType.Cv32F, 1);
                Mat translationVectors = new Mat();
                //translationVectors[0] = new Mat(1, 3, DepthType.Cv32F, 1);

                /*
                 * double error = CvInvoke.CalibrateCamera(new MCvPoint3D32f[][] { objCorners }, new PointF[][] { veccorners.ToArray() },
                 *   SRC_Img.Size, intrisic.IntrinsicMatrix, distortCoeffs, CalibType.UserIntrinsicGuess, new MCvTermCriteria(30, 0.01), out rotationVectors, out translationVectors);
                 */
                /*
                 *
                 * Fisheye.Calibrate(vecobjcorners, veccorners, SRC_Img.Size, intrisic.IntrinsicMatrix, distortCoeffs, rotationVectors, translationVectors,
                 * Fisheye.CalibrationFlag.UseIntrinsicGuess, TermCriteria);
                 * */

                Matrix <float> matrix = new Matrix <float>(new float[, ] {
                    { 1, 0, 349 }, { 0, 1, 286 }, { 0, 0, 1 }
                });
                Fisheye.UndistorImage(SRC_Img, Corrected_Img, matrix, new VectorOfFloat(new float[] { 3500, 3500, 0, 0 }));
                Image <Gray, Byte> Res_Img = new Image <Gray, byte>(2 * SRC_Img.Width, SRC_Img.Height);
                CvInvoke.HConcat(SRC_Img, Corrected_Img, Res_Img);
                int error = 0;
                error++;
                //error += 0;
                //Array aa = rotationVectors[0].Data;
                //error += 0;
                //float q = rotationVectors.ElementAt<float>(0);
            }
            catch (Exception) { }
        }
        private Task DoCalibrate(object o)
        {
            return(Task.Factory.StartNew(async() =>
            {
                VectorOfInt allIds = new VectorOfInt();
                VectorOfVectorOfPointF allCorners = new VectorOfVectorOfPointF();
                VectorOfInt allCharucoIds = new VectorOfInt();
                VectorOfPointF allCharucoCorners = new VectorOfPointF();
                VectorOfInt markerCounterPerFrame = new VectorOfInt();
                VectorOfInt charucoCounterPerFrame = new VectorOfInt();
                int squaresX = 0;
                int squaresY = 0;
                float squareLength = 0f;
                float markerLength = 0f;
                PredefinedDictionaryName dictionary = PredefinedDictionaryName.Dict4X4_50;
                System.Drawing.Size size = new System.Drawing.Size();

                bool fisheye = false;

                Parent.SyncContext.Send(async c =>
                {
                    fisheye = Parent.CameraViewModel.FishEyeCalibration;

                    squaresX = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.SquaresX;
                    squaresY = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.SquaresY;
                    squareLength = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.SquareLength;
                    markerLength = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.MarkerLength;
                    size = Parent.CameraViewModel.Image.CvImage.Size;
                    dictionary = Parent.SettingContainer.Settings.CalibrationSettings.ChArucoCalibrationSettings.Dictionary;
                    foreach (ChArUcoImageContainer image in Images)
                    {
                        if (image.MarkerCorners != null && image.CharucoCorners.Size > 4)
                        {
                            allIds.Push(image.MarkerIds);
                            allCorners.Push(image.MarkerCorners);
                            allCharucoIds.Push(image.CharucoIds);
                            allCharucoCorners.Push(image.CharucoCorners);
                            markerCounterPerFrame.Push(new int[] { image.MarkerCorners.Size });
                            charucoCounterPerFrame.Push(new int[] { image.CharucoCorners.Size });
                        }
                    }
                }, null);

                if (markerCounterPerFrame.Size > 0)
                {
                    MetroDialogSettings settings = new MetroDialogSettings()
                    {
                        AnimateShow = true,
                        AnimateHide = true
                    };

                    var controller = await Parent.DialogCoordinator.ShowProgressAsync(Parent, "Please wait...", "Calculating calibration parameter now!", settings: settings);
                    controller.SetIndeterminate();
                    controller.SetCancelable(false);

                    bool error = false;
                    (Mat cameraMatrix, Mat distCoeffs, double rms)result = (null, null, 0.0);
                    try
                    {
                        result = ChArUcoCalibration.CalibrateCharuco(squaresX, squaresY, squareLength, markerLength, dictionary, size, allCharucoIds, allCharucoCorners, charucoCounterPerFrame, fisheye, delegate(byte[] input)
                        {
                            return Parent.IOProxy.GetRemoteChessboardCorner(input);
                        });
                    }
                    catch (Exception ex)
                    {
                        error = true;
                    }

                    await controller.CloseAsync();
                    if (!error)
                    {
                        var con = await Parent.DialogCoordinator.ShowMessageAsync(Parent, "Result", string.Format("RMS: {0}\nDo you want to save?", result.rms), MessageDialogStyle.AffirmativeAndNegative, null);
                        if (con == MessageDialogResult.Affirmative)
                        {
                            Parent.SyncContext.Post(async c =>
                            {
                                Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.Fx = result.cameraMatrix.GetValue(0, 0);
                                Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.Fy = result.cameraMatrix.GetValue(1, 1);
                                Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.Cx = result.cameraMatrix.GetValue(0, 2);
                                Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.Cy = result.cameraMatrix.GetValue(1, 2);
                                Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.Alpha = result.cameraMatrix.GetValue(0, 1) / result.cameraMatrix.GetValue(0, 0);

                                Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.DistCoeffs.Clear();

                                if (Parent.CameraViewModel.FishEyeCalibration)
                                {
                                    for (int i = 0; i < result.distCoeffs.Rows && i < 8; i++)
                                    {
                                        Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.DistCoeffs.Add(result.distCoeffs.GetValue(i, 0));
                                    }
                                }
                                else
                                {
                                    for (int i = 0; i < result.distCoeffs.Cols && i < 8; i++)
                                    {
                                        Parent.SettingContainer.Settings.CalibrationSettings.IntrinsicCalibrationSettings.DistCoeffs.Add(result.distCoeffs.GetValue(0, i));
                                    }
                                }

                                Parent.UpdateSettings(false);
                            }, null);
                        }
                    }
                    else
                    {
                        await Parent.DialogCoordinator.ShowMessageAsync(Parent, "Error", "Error during calibration!");
                    }
                }
                else
                {
                    await Parent.DialogCoordinator.ShowMessageAsync(Parent, "Error", "Not enough valid input frames available!");
                }
            }));
        }
Example #20
        private void MorphTriangle(ref Mat img1, ref Mat img2, ref Mat imgM, ref VectorOfPointF t1, ref VectorOfPointF t2, ref VectorOfPointF tM, float alpha)
        {
            // Find bounding rectangle for each triangle
            Rectangle r1 = CvInvoke.BoundingRectangle(t1);
            Rectangle r2 = CvInvoke.BoundingRectangle(t2);
            Rectangle rM = CvInvoke.BoundingRectangle(tM);

            // Offset points by left top corner of the respective rectangles
            VectorOfPointF t1RectFlt = new VectorOfPointF();
            VectorOfPointF t2RectFlt = new VectorOfPointF();
            VectorOfPointF tMRectFlt = new VectorOfPointF();

            // for fillConvexPoly we need ints
            VectorOfPoint tMrectInt = new VectorOfPoint();

            for (int i = 0; i < 3; i++)
            {
                PointF[] pfArrM = { new PointF(tM[i].X - rM.X, tM[i].Y - rM.Y) };
                tMRectFlt.Push(pfArrM);

                Point[] pArrInt = { new Point((int)(tM[i].X - rM.X), (int)(tM[i].Y - rM.Y)) };
                tMrectInt.Push(pArrInt);

                PointF[] pfArr1 = { new PointF(t1[i].X - r1.X, t1[i].Y - r1.Y) };
                t1RectFlt.Push(pfArr1);

                PointF[] pfArr2 = { new PointF(t2[i].X - r2.X, t2[i].Y - r2.Y) };
                t2RectFlt.Push(pfArr2);
            }

            // Create white triangle mask
            Mat mask = Mat.Zeros(rM.Height, rM.Width, Emgu.CV.CvEnum.DepthType.Cv32F, 3);

            CvInvoke.FillConvexPoly(mask, tMrectInt, new MCvScalar(1.0, 1.0, 1.0), Emgu.CV.CvEnum.LineType.AntiAlias, 0); // different

            // Apply warpImage to small rectangular patches
            Mat img1Rect = new Mat(img1, r1);
            Mat img2Rect = new Mat(img2, r2);


            Mat warpImage1 = Mat.Zeros(rM.Height, rM.Width, Emgu.CV.CvEnum.DepthType.Cv32F, 3);
            Mat warpImage2 = Mat.Zeros(rM.Height, rM.Width, Emgu.CV.CvEnum.DepthType.Cv32F, 3);


            ApplyAffineTransform(ref warpImage1, ref img1Rect, ref t1RectFlt, ref tMRectFlt);
            ApplyAffineTransform(ref warpImage2, ref img2Rect, ref t2RectFlt, ref tMRectFlt);


            // Alpha blend rectangular patches into new image
            Mat imgRect = new Mat();
            Image <Bgr, Byte> imgRect_I = (1.0f - alpha) * warpImage1.ToImage <Bgr, Byte>() + alpha * warpImage2.ToImage <Bgr, Byte>();

            imgRect = imgRect_I.Mat;

            // Delete all outside of triangle
            imgRect.ConvertTo(imgRect, Emgu.CV.CvEnum.DepthType.Cv32F);
            mask.ConvertTo(mask, Emgu.CV.CvEnum.DepthType.Cv32F);
            CvInvoke.Multiply(imgRect, mask, imgRect);

            // Delete all inside the target triangle
            Mat tmp = new Mat(imgM, rM);
            Image <Bgr, Byte> tmpI = tmp.ToImage <Bgr, Byte>();
            Mat mask_cp            = new Mat();

            mask.CopyTo(mask_cp);

            Image <Bgr, Byte> tmp_maskI = mask.ToImage <Bgr, Byte>();

            mask_cp.SetTo(new MCvScalar(1.0f, 1.0f, 1.0f));

            CvInvoke.Subtract(mask_cp, mask, mask);
            CvInvoke.Multiply(tmp, mask, tmp);
            count++;

            // Add morphed triangle to target image
            CvInvoke.Add(tmp, imgRect, tmp); // img(rM) = tmp;
            Mat x = new Mat(imgM, rM);

            tmp.CopyTo(x);
        }
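
MorphTriangle blends the two warped patches through Image<Bgr, Byte> arithmetic ((1.0f - alpha) * warpImage1 + alpha * warpImage2). As a hedged aside, the same blend can be expressed with CvInvoke.AddWeighted directly on the Mats; the helper name BlendPatches is hypothetical.

        // Hedged sketch: alpha-blend two equally sized patches with AddWeighted,
        // an alternative to the Image<Bgr, Byte> arithmetic used above.
        public static Mat BlendPatches(Mat warpImage1, Mat warpImage2, float alpha)
        {
            Mat blended = new Mat();
            CvInvoke.AddWeighted(warpImage1, 1.0 - alpha, warpImage2, alpha, 0.0, blended);
            return blended;
        }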
Example #21
        public void Tick(Object sender, EventArgs args)
        {
            if (mKinect != null)
            {
                MultiSourceFrame frame = mFrameReader.AcquireLatestFrame();

                TrackFingers();

                if (frame != null)
                {
                    using (var depthFrame = frame.DepthFrameReference.AcquireFrame())
                    {
                        if (depthFrame != null)
                        {
                            if (depthData == null)
                            {
                                depthWidth  = depthFrame.FrameDescription.Width;
                                depthHeight = depthFrame.FrameDescription.Height;

                                depthData = new ushort[depthWidth * depthHeight];
                                pixelData = new byte[depthWidth * depthHeight * 3];
                                mFrame    = new Mat(depthHeight, depthWidth, DepthType.Cv8U, 1);
                            }

                            ushort minDepth = depthFrame.DepthMinReliableDistance;
                            ushort maxDepth = depthFrame.DepthMaxReliableDistance;

                            depthFrame.CopyFrameDataToArray(depthData);
                            Image <Gray, Byte> img = mFrame.ToImage <Gray, Byte>();

                            for (int i = 0; i < depthData.Length; i++)
                            {
                                ushort depth = depthData[i];
                                //byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);
                                // Binarize: depths between 10 mm and 1000 mm count as foreground (0), everything else as background (255)
                                byte intensity = (byte)(depth < 1000 && depth > 10 ? 0 : 255);

                                img.Data[i / depthWidth, i % depthWidth, 0] = intensity;
                            }

                            mFrame = img.Mat;

                            // DISPLAY Depth image
                            //(Controls["FrameImageBox"] as ImageBox).Image = img;

                            //*********************
                            // Gaussian Blur
                            //*********************
                            CvInvoke.GaussianBlur(img, img, new Size(5, 5), 0);

                            //*********************
                            // Threshold
                            //*********************
                            //mFrame = img.Mat;

                            //Mat thresholds = new Mat(); ;

                            //CvInvoke.Threshold(mFrame, thresholds, THRESHOLD, THRESHOLD_MAX_VALUE, ThresholdType.Binary);

                            //// DISPLAY Thresholds
                            //(Controls["FrameImageBox"] as ImageBox).Image = img;

                            //*********************
                            // Contours
                            //*********************
                            Mat hierarchy = new Mat();

                            VectorOfVectorOfPoint  contours            = new VectorOfVectorOfPoint();
                            VectorOfVectorOfPointF significantContours = new VectorOfVectorOfPointF();
                            CvInvoke.FindContours(mFrame, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);

                            Image <Gray, Byte> contourImage = new Image <Gray, Byte>(mFrame.Size);

                            for (int i = 0; i < contours.Size; i++)
                            {
                                if (CvInvoke.ContourArea(contours[i]) > 500.0)
                                {
                                    VectorOfPointF          bigContour = new VectorOfPointF();
                                    System.Drawing.PointF[] points     = new System.Drawing.PointF[contours[i].Size];
                                    Point[] intPoints = contours[i].ToArray();

                                    for (int j = 0; j < intPoints.Length; j++)
                                    {
                                        points[j] = intPoints[j];
                                    }

                                    bigContour.Push(points);
                                    significantContours.Push(bigContour);
                                }
                            }

                            //if (contours.Size > 0)
                            //{
                            //    CvInvoke.DrawContours(contourImage, significantContours, -1, new MCvScalar(255, 0, 0));
                            //}

                            //(Controls["FrameImageBox"] as ImageBox).Image = contourImage;

                            //*********************
                            // Convex Hulls
                            //*********************
                            for (int i = 0; i < significantContours.Size; i++)
                            {
                                System.Drawing.PointF[] hullPoints;
                                VectorOfPoint           contourPoints = new VectorOfPoint(Array.ConvertAll(significantContours[i].ToArray(), Point.Round));
                                VectorOfInt             convexHull    = new VectorOfInt();

                                // Hull as points (for drawing) and as indices into the contour (required by ConvexityDefects)
                                hullPoints = CvInvoke.ConvexHull(significantContours[i].ToArray());
                                CvInvoke.ConvexHull(contourPoints, convexHull);

                                CvInvoke.Polylines(mFrame, Array.ConvertAll(hullPoints, Point.Round), true, new MCvScalar(255, 255, 255));

                                // Compute the convexity defects between the contour and its hull
                                //VectorOfVectorOfInt defects = new VectorOfVectorOfInt();
                                Mat defects = new Mat();
                                CvInvoke.ConvexityDefects(contourPoints /*significantContours[i]*/,
                                                          convexHull /*new VectorOfPointF(hullPoints)*/,
                                                          defects);

                                if (!defects.IsEmpty)
                                {
                                    // Each defect row holds: start index, end index, farthest-point index, fixed-point depth
                                    Matrix <int> m = new Matrix <int>(defects.Rows, defects.Cols, defects.NumberOfChannels);
                                    defects.CopyTo(m);

                                    List <Point> validPoints = new List <Point>();

                                    // Collect defect start points that qualify as candidate finger positions
                                    for (int d = 0; d < m.Rows; d++)
                                    {
                                        int startIndex    = m.Data[d, 0];
                                        int endIndex      = m.Data[d, 1];
                                        int farthestIndex = m.Data[d, 2];

                                        Point farthestPoint = contourPoints[farthestIndex];
                                        Point startPoint    = contourPoints[startIndex];

                                        if (IsDefectUnique(startPoint, validPoints) &&
                                            IsDefectOutsideHandRadius(startPoint))
                                        {
                                            validPoints.Add(startPoint);
                                        }

                                        //if (true/*endIndex - startIndex > 10*/)
                                        //{
                                        //    CvInvoke.Circle(mFrame, startPoint, 3, new MCvScalar(255, 0, 0), 2);
                                        //}
                                    }

                                    // Draw valid indices
                                    foreach (Point p in validPoints)
                                    {
                                        CvInvoke.Circle(mFrame, p, 3, new MCvScalar(255, 0, 0), 2);
                                    }
                                }
                            }

                            (Controls["FrameImageBox"] as ImageBox).Image = mFrame;
                        }
                    }
                }
            }
        }
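
IsDefectUnique and IsDefectOutsideHandRadius are called above but not shown. Below is a minimal sketch of how such filters might be written, assuming a minimum pixel distance between accepted defect points and a stored palm center/radius; mHandCenter and mHandRadius are hypothetical field names, not taken from the original code.

        // Sketch only: reject defect points that lie too close to one already accepted
        private bool IsDefectUnique(Point candidate, List<Point> accepted, double minDistance = 20.0)
        {
            foreach (Point p in accepted)
            {
                double dx = candidate.X - p.X;
                double dy = candidate.Y - p.Y;
                if (Math.Sqrt(dx * dx + dy * dy) < minDistance)
                {
                    return false;
                }
            }
            return true;
        }

        // Sketch only: keep defect points that fall outside an assumed palm circle,
        // so points on the palm itself are not counted as finger candidates
        private bool IsDefectOutsideHandRadius(Point candidate)
        {
            double dx = candidate.X - mHandCenter.X; // mHandCenter, mHandRadius: hypothetical fields
            double dy = candidate.Y - mHandCenter.Y;
            return Math.Sqrt(dx * dx + dy * dy) > mHandRadius;
        }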
Beispiel #22
0
        //public MorphImage(Mat img1, Mat img2, VectorOfPointF points1, VectorOfPointF points2, float alpha)
        public MorphImage(ImageDetails imgdet1, ImageDetails imgdet2, VectorOfPointF points1, VectorOfPointF points2, float alpha)
        {
            this.img1  = imgdet1.ResizedImage.Mat;
            this.img2  = imgdet2.ResizedImage.Mat;
            this.alpha = alpha;

            img1.ConvertTo(img1, Emgu.CV.CvEnum.DepthType.Cv32F);
            img2.ConvertTo(img2, Emgu.CV.CvEnum.DepthType.Cv32F);

            this.points1 = points1;
            this.points2 = points2;

            if (points1.Size == 68)
            {
                AddCornerPoints(points1, img1);
            }

            if (points2.Size == 68)
            {
                AddCornerPoints(points2, img2);
            }

            // Add Points for whole image
            //points1 = AddCornerPoints(points1, img1); // todo: corner points get added twice
            //points2 = AddCornerPoints(points2, img2);

            // Create an instance of Subdiv2D
            Rectangle rect   = new Rectangle(0, 0, img1.Size.Width, img1.Size.Height);
            Subdiv2D  subdiv = new Subdiv2D(rect);

            // Create and Draw the Delaunay triangulation
            triangleIndexes = new VectorOfVectorOfInt();
            CreateDelaunay(ref img1, ref subdiv, ref points1, false, ref triangleIndexes);

            //// Draw the Delaunay triangulation of face 1
            //Mat img1D = img1.Clone();
            //DrawDelaunay(ref img1D, ref subdiv, new MCvScalar(255, 255, 255));


            //// Draw the Delaunay triangulation of face 2
            //Mat img2D = img2.Clone();
            //DrawDelaunay(ref img2D, ref points2, triangleIndexes, new MCvScalar(255, 255, 255));
            //img2D.ConvertTo(img2D, Emgu.CV.CvEnum.DepthType.Cv8U);

            //compute weighted average point coordinates
            pointsM = new VectorOfPointF();
            for (int i = 0; i < points1.Size; i++)
            {
                float    x  = (1 - alpha) * points1[i].X + alpha * points2[i].X;
                float    y  = (1 - alpha) * points1[i].Y + alpha * points2[i].Y;
                PointF[] pf = { new PointF(x, y) };
                pointsM.Push(pf);
            }

            // Empty image for the morphed face: large enough to hold both inputs
            int colsM = Math.Max(img1.Cols, img2.Cols);
            int rowsM = Math.Max(img1.Rows, img2.Rows);


            imgM = Mat.Zeros(rowsM, colsM, Emgu.CV.CvEnum.DepthType.Cv32F, 3);

            for (int i = 0; i < triangleIndexes.Size; i++)
            {
                VectorOfPointF t1 = new VectorOfPointF();
                VectorOfPointF t2 = new VectorOfPointF();
                VectorOfPointF tM = new VectorOfPointF();

                // Gather the three vertices of the current triangle from each point set
                for (int j = 0; j < 3; j++)
                {
                    int idx = triangleIndexes[i][j];
                    t1.Push(new PointF[] { points1[idx] });
                    t2.Push(new PointF[] { points2[idx] });
                    tM.Push(new PointF[] { pointsM[idx] });
                }

                MorphTriangle(ref img1, ref img2, ref imgM, ref t1, ref t2, ref tM, alpha);
            }
            imgM.ConvertTo(imgM, Emgu.CV.CvEnum.DepthType.Cv8U);
            //CvInvoke.Imshow("Morphed Face", imgM);
        }
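
AddCornerPoints is invoked above when exactly 68 landmarks are present, but its body is not part of this snippet. A minimal sketch follows, assuming it appends the four image corners and the four edge midpoints so the Delaunay triangulation spans the whole image; the exact boundary points chosen are an assumption.

        // Sketch only: append image corners and edge midpoints to the landmark set
        private void AddCornerPoints(VectorOfPointF points, Mat img)
        {
            float w = img.Cols - 1;
            float h = img.Rows - 1;

            points.Push(new PointF[]
            {
                new PointF(0, 0),     new PointF(w / 2, 0),     new PointF(w, 0),
                new PointF(w, h / 2), new PointF(w, h),         new PointF(w / 2, h),
                new PointF(0, h),     new PointF(0, h / 2)
            });
        }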