public List <Rectangle> DetectLetters(Image <Bgr, Byte> img)
        {
            List <Rectangle> rects = new List <Rectangle>();

            // Emphasise vertical edges (typical of text strokes) with a horizontal Sobel filter.
            var imgGray  = img.Convert <Gray, Byte>();
            var imgSobel = imgGray.Sobel(1, 0, 3).Convert <Gray, Byte>();
            var imgRes   = new Image <Gray, byte>(imgSobel.Size);

            // Binarize with Otsu; the fixed threshold value is ignored when the Otsu flag is set.
            CvInvoke.Threshold(imgSobel, imgRes, 160, 255, ThresholdType.Binary | ThresholdType.Otsu);

            // Morphological dilation/erosion to merge nearby strokes into candidate text blocks.
            var element = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(2, 2), new Point(-1, -1));
            CvInvoke.Dilate(imgRes, imgRes, element, new Point(0, 0), 1, BorderType.Default, new MCvScalar(0));
            CvInvoke.Erode(imgRes, imgRes, element, new Point(0, 0), 12, BorderType.Default, new MCvScalar(0));

            using (Mat hierarchy = new Mat())
                using (VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint())
                {
                    CvInvoke.FindContours(imgRes, contours, hierarchy, RetrType.Tree, ChainApproxMethod.ChainApproxNone);
                    for (int i = 0; i < contours.Size; i++)
                    {
                        Rectangle rectangle = CvInvoke.BoundingRectangle(contours[i]);
                        var       area      = rectangle.Width * rectangle.Height;

                        // Keep only reasonably large, wide-and-flat regions that look like lines of text.
                        if (area > 1400 && rectangle.Width < img.Width * 0.7 && rectangle.Width > rectangle.Height * 1.5)
                        {
                            rects.Add(rectangle);
                        }
                    }
                }
            return(rects);
        }
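A minimal usage sketch for DetectLetters (not part of the original example): the file names and the wrapper method are assumptions, and the drawing and saving calls mirror the ones used later in the MainForm constructor.

        public void DetectLettersDemo()
        {
            using (Image<Bgr, Byte> img = new Image<Bgr, Byte>("letters.jpg"))   // placeholder file name
            {
                foreach (Rectangle rect in DetectLetters(img))
                {
                    img.Draw(rect, new Bgr(0, 255, 0), 3);   // green box around each candidate text region
                }
                img.Save("letters_boxes.jpg");               // placeholder output path
            }
        }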
Example #2
        public void CalibrateCV(ChessBoard cb, out Matrix cameraMat, out Matrix distCoeffs)
        {
            // Every calibration image observes the same physical board, so the world (object)
            // points are simply repeated once per image.
            var worldCoordinates = cb.boardLocalCoordinates_cv;

            List <List <Point3f> > worldpoints = new List <List <Point3f> >();

            for (int i = 0; i < images.Count; i++)
            {
                worldpoints.Add(worldCoordinates.ToList());
            }

            var imagepoints = images.Select(x => x.ImagePoints);

            Matrix cameramat = new Matrix(3, 3);

            distCoeffs = new Matrix(4, 1);
            Mat[] rvecs, tvecs;
            CVI.CalibrateCamera(worldpoints.Select(x => x.ToArray()).ToArray(), imagepoints.ToArray(), images.First().imageSize,
                                cameramat, distCoeffs, CalibType.Default, new MCvTermCriteria(),
                                out rvecs, out tvecs);
            cameraMat = cameramat;
        }
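A minimal sketch of how the calibration output could be consumed (assumed calling code, not part of the original): it feeds the estimated camera matrix and distortion coefficients to CvInvoke.Undistort. This assumes the Matrix type used above implements Emgu's IInputArray; the file names are placeholders.

        public void UndistortDemo(ChessBoard cb)
        {
            Matrix cameraMat, distCoeffs;
            CalibrateCV(cb, out cameraMat, out distCoeffs);

            using (Mat distorted = CvInvoke.Imread("distorted.jpg", ImreadModes.AnyColor))   // placeholder path
            using (Mat corrected = new Mat())
            {
                CvInvoke.Undistort(distorted, corrected, cameraMat, distCoeffs);
                CvInvoke.Imwrite("corrected.jpg", corrected);
            }
        }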
Example #3
        public static void CutSchnitzelImage(string folderPath, IEnumerable <Tuple <string, double> > input = null)
        {
            var path = folderPath + "pic.jpg";

            try
            {
                // Load the photo and convert it to HSV so the colour range check below is straightforward.
                var mat = CvInvoke.Imread(path, ImreadModes.AnyColor);
                CvInvoke.CvtColor(mat, mat, ColorConversion.Bgr2Hsv);
                Image <Hsv, Byte> image = mat.ToImage <Hsv, Byte>();

                // Keep only pixels whose hue/saturation/value fall inside the "schnitzel" colour band.
                var botLimit = new ScalarArray(new MCvScalar(10, 125, 75));
                var uprLimit = new ScalarArray(new MCvScalar(15, 255, 255));

                var contours = new VectorOfVectorOfPoint();
                Image <Gray, byte> rangeMask = new Image <Gray, byte>(image.Width, image.Height);

                CvInvoke.InRange(mat, botLimit, uprLimit, rangeMask);
                CvInvoke.FindContours(rangeMask, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple);

                var arrayList = new List <ContourArea>(contours.Size);
                for (int i = 0; i < contours.Size; i++)
                {
                    var contour = contours[i];
                    arrayList.Add(new ContourArea(CvInvoke.ContourArea(contour), contour));
                }

                var color          = new MCvScalar(255, 0, 0);
                var biggestContour = arrayList.OrderByDescending(x => x.Area).FirstOrDefault();

                if (biggestContour == null || biggestContour.Area < 1000)
                {
                    // This is not a schnitzel.
                    return;
                }

                // Draw the largest contour filled into a mask and let DividePicture split the picture along it.
                var mask = Mat.Zeros(rangeMask.Rows, rangeMask.Cols, DepthType.Cv8U, 3);
                CvInvoke.DrawContours(mask, new VectorOfVectorOfPoint(biggestContour.Contour), 0, color, -1);
                var newPicPath = DividePicture(path, mask, biggestContour.Area, input);
                //return newPicPath;
            }
            catch (Exception e)
            {
                // do nothing
            }
        }
Example #4
        public static PointF[] FindChessboardCorners(IInputArray image, Size ChessboardSize)
        {
            VectorOfPointF corners = new VectorOfPointF();

            if (CVI.FindChessboardCorners(image, ChessboardSize, corners))
            {
                return(corners.ToArray());
            }
            throw new Exception("No Corners Found");
        }
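The detector above returns the corners at pixel accuracy; calibration pipelines usually refine them to sub-pixel accuracy afterwards. A minimal sketch using CvInvoke.CornerSubPix (CVI in the original is just an alias for CvInvoke); the grayscale input, the 11x11 search window and the termination criteria are illustrative assumptions, not values taken from the original code.

        public static PointF[] FindChessboardCornersRefined(Mat grayImage, Size chessboardSize)
        {
            using (VectorOfPointF corners = new VectorOfPointF())
            {
                if (!CvInvoke.FindChessboardCorners(grayImage, chessboardSize, corners))
                {
                    throw new Exception("No Corners Found");
                }

                // Refine each corner to sub-pixel accuracy inside an 11x11 search window.
                CvInvoke.CornerSubPix(grayImage, corners, new Size(11, 11), new Size(-1, -1),
                                      new MCvTermCriteria(30, 0.01));
                return corners.ToArray();
            }
        }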
Example #5
        private static string DividePicture(string path, Mat mask, double area, IEnumerable <Tuple <string, double> > input)
        {
            var mat = CvInvoke.Imread(path, ImreadModes.AnyColor);

            CvInvoke.CvtColor(mat, mat, ColorConversion.Bgr2Rgb);
            // The mask contains the schnitzel contour drawn filled, so any non-zero pixel belongs to
            // the schnitzel. Walk the mask row by row until roughly half of the contour area has been
            // seen, and remember the leftmost/rightmost mask columns of that row.
            Image <Bgr, Byte> image = mask.ToImage <Bgr, Byte>();
            var points     = image.Data;
            int sum        = 0;
            int cuttingRow = 0;
            int minCol     = 0;
            int maxCol     = 0;

            for (int row = 0; row < image.Rows; row++)
            {
                for (int col = 0; col < image.Cols; col++)
                {
                    if (points[row, col, 0] > 0 || points[row, col, 1] > 0 || points[row, col, 2] > 0)
                    {
                        if (minCol == 0 || col < minCol)
                        {
                            minCol = col;
                        }

                        if (col > maxCol)
                        {
                            maxCol = col;
                        }
                        if (sum++ > (int)area / 2)
                        {
                            cuttingRow = row;
                        }
                    }
                }

                if (cuttingRow > 0)
                {
                    break;
                }

                minCol = 0;
                maxCol = 0;
            }

            // Draw the cutting line at the row that splits the schnitzel area roughly in half.
            var color = new MCvScalar(0, 255, 0);

            CvInvoke.Line(mat, new Point(minCol, cuttingRow), new Point(maxCol, cuttingRow), color, 10);
            var newImage = mat.ToImage <Rgb, Byte>();
            var fileName = "/storage/emulated/0/Android/data/Camera2Basic.Camera2Basic/files/pic.jpg";

            newImage.ToBitmap().Save(fileName);
            return(fileName);
        }
Example #6
        public void CalibrateCV(ChessBoard cb, out PinholeCamera[] cameras)
        {
            // Every calibration image observes the same physical board, so the world (object)
            // points are simply repeated once per image.
            var worldCoordinates = cb.boardLocalCoordinates_cv;

            List <List <Point3f> > worldpoints = new List <List <Point3f> >();

            for (int i = 0; i < images.Count; i++)
            {
                worldpoints.Add(worldCoordinates.ToList());
            }

            var imagepoints = images.Select(x => x.ImagePoints);

            Matrix cameramat  = new Matrix(3, 3);
            Matrix distcoeffs = new Matrix(4, 1);

            Mat[] rvecs, tvecs;

            CVI.CalibrateCamera(worldpoints.Select(x => x.ToArray()).ToArray(), imagepoints.ToArray(), images.First().imageSize,
                                cameramat, distcoeffs, CalibType.Default, new MCvTermCriteria(),
                                out rvecs, out tvecs);

            // One PinholeCamera per calibration image. The per-view rotation is read back from the
            // Rodrigues vector, but assembling the full extrinsic matrix (rot, tvec, worldMat) is
            // left unfinished in this example.
            cameras = new PinholeCamera[images.Count];
            for (int i = 0; i < rvecs.Length; i++)
            {
                var rvec = rvecs[i];
                var tvec = tvecs[i];
                cameras[i] = new PinholeCamera();
                var cam = cameras[i];
                var rot = new RotationVector3D();
                rvec.CopyTo(rot);

                var worldMat = new Matrix4d();
            }
        }
 /// <summary>
 /// Pre-computes the undistortion map - the coordinates of the corresponding pixel in the distorted image for every pixel in the corrected image. The map (together with the input and output images) can then be passed to the cvRemap function.
 /// </summary>
 /// <param name="width">The width of the image</param>
 /// <param name="height">The height of the image</param>
 /// <param name="mapx">The output array of x-coordinates of the map</param>
 /// <param name="mapy">The output array of y-coordinates of the map</param>
 public void InitUndistortMap(int width, int height, out Matrix <float> mapx, out Matrix <float> mapy)
 {
     mapx = new Matrix <float>(height, width);
     mapy = new Matrix <float>(height, width);
     CvInvoke.cvInitUndistortMap(IntrinsicMatrix.Ptr, DistortionCoeffs.Ptr, mapx, mapy);
 }
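A minimal sketch of the second half of that workflow (assumed calling code, not part of the original): the maps produced above are passed to CvInvoke.Remap, the modern equivalent of cvRemap. The containing type is assumed to be Emgu's IntrinsicCameraParameters, which exposes the IntrinsicMatrix and DistortionCoeffs used above; the resolution and file names are placeholders.

 public static void UndistortWithMap(IntrinsicCameraParameters intrinsics)
 {
     Matrix<float> mapx, mapy;
     intrinsics.InitUndistortMap(640, 480, out mapx, out mapy);

     using (Mat distorted = CvInvoke.Imread("frame.jpg", ImreadModes.AnyColor))
     using (Mat corrected = new Mat())
     {
         // For every output pixel, Remap looks up the source coordinates stored in mapx/mapy.
         CvInvoke.Remap(distorted, corrected, mapx, mapy, Inter.Linear);
         CvInvoke.Imwrite("frame_undistorted.jpg", corrected);
     }
 }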
Example #8
        /// <summary>
        /// Get a sequence of motion components.
        /// </summary>
        /// <param name="segMask">The output image where the motion segmentation mask is stored</param>
        /// <param name="boundingRects">The output vector of bounding rectangles, one per motion connected component</param>
        public void GetMotionComponents(IOutputArray segMask, VectorOfRect boundingRects)
        {
            TimeSpan ts = _lastTime.Subtract(_initTime);

            CvInvoke.SegmentMotion(_mhi, segMask, boundingRects, ts.TotalSeconds, _maxTimeDelta);
        }
Example #9
 /// <summary>
 /// Rotate the Affine3 matrix by a Rodrigues vector
 /// </summary>
 /// <param name="r0">Value of the Rodrigues vector</param>
 /// <param name="r1">Value of the Rodrigues vector</param>
 /// <param name="r2">Value of the Rodrigues vector</param>
 /// <returns>The rotated Affine3 matrix</returns>
 public Affine3d Rotate(double r0, double r1, double r2)
 {
     return(new Affine3d(CvInvoke.cveAffine3dRotate(_ptr, r0, r1, r2)));
 }
        public MainForm()
        {
            InitializeComponent();
            buttonPlay.Enabled  = false;
            _isMetricFormOpened = false;

            _frames              = new Frame[5];
            _effectsForm         = new EffectsForm(_frames);
            _effectsForm.Visible = false;
            _effectsForm.Show();
            _effectsForm.Hide();

            _metricsForm         = new MetricsForm();
            _metricsForm.Visible = false;
            _metricsForm.Show();
            _metricsForm.Hide();
            _metricsForm.Closing += (o, args) => {
                _isMetricFormOpened = false;
            };


            // get all images from C:\Users\Anne\Documents\MATLAB\distTrans\raw_image
            var p = "./640117___2.jpg";
            {
                Image <Bgr, Byte> img      = new Image <Bgr, Byte>(p);
                Mat[]             elements =
                {
                    CvInvoke.GetStructuringElement(ElementShape.Cross,     new Size(2, 2), new Point(-1, -1)),
                    CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(2, 2), new Point(-1, -1))
                };


                for (int elementId = 0; elementId < elements.Length; elementId++)
                {
                    var stage = new Image <Gray, byte>(img.Bitmap);
                    for (int i = 0; i < 1; i++)
                    {
                        CvInvoke.Dilate(stage, stage, elements[elementId], new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0));
                        stage.Save("./" + elementId + "__" + i + ".jpg");
                    }

                    for (int i = 5; i < 10; i++)
                    {
                        CvInvoke.Erode(stage, stage, elements[elementId], new Point(-1, -1), 1, BorderType.Default, new MCvScalar(0));
                        stage.Save("./" + elementId + "__" + i + ".jpg");
                    }
                }
            }
            //Application.Exit();
            //return;
            string        directory  = "./";
            List <string> imagePaths = GetImagesPath(directory);

            foreach (string imgPath in imagePaths)
            {
                if (imgPath.EndsWith(".jpg") || imgPath.EndsWith(".png"))
                {
                    try
                    {
                        Image <Bgr, Byte> img   = new Image <Bgr, Byte>(imgPath);
                        List <Rectangle>  rects = DetectLetters(img);
                        foreach (Rectangle rect in rects)
                        {
                            img.Draw(rect, new Bgr(0, 255, 0), 3);
                        }

                        string direct   = Path.GetDirectoryName(imgPath);
                        string fileName = Path.GetFileName(imgPath);
                        string path     = GetFilePath(direct + "\\output\\", fileName);
                        try
                        {
                            Console.Write(path);
                            img.Save(path);
                        }
                        catch (Exception e)
                        {
                            //...
                        }
                    }
                    catch (Exception e)
                    {
                        //...
                    }
                }
            }
        }
Example #11
        public static unsafe void ceresSolveAruco()
        {
            var                phc          = PinholeCamera.getTestCameraHuawei();
            string             dir          = @"C:\Users\jens\Desktop\calibratie\Huawei p9\aruco\stereo test\";
            List <CeresMarker> ceresmarkers = new List <CeresMarker>();
            List <CeresCamera> cerescameras = new List <CeresCamera>();
            var                files        = Directory.GetFiles(dir).ToList();


            //8 points are needed
            var markerDictionary = Aruco.findArucoMarkers(files, Path.Combine(dir, "aruco_detected\\"), 1);
            var pairs            = findImagePairsMinMarkers(markerDictionary, 8);

            Matrix K = new Matrix(phc.Intrinsics.Mat);

            var W = new Matrix(new double[] {
                0.0D, -1.0D, 0.0D,
                1.0D, 0.0D, 0.0D,
                0.0D, 0.0D, 1.0D
            });


            var Wt = new Matrix(new double[] {
                0.0D, 1.0D, 0.0D,
                -1.0D, 0.0D, 0.0D,
                0.0D, 0.0D, 1.0D
            });
            var Z = new Matrix(new double[] {
                0.0D, 1.0D, 0.0D,
                -1.0D, 0.0D, 0.0D,
                0.0D, 0.0D, 0D
            });

            var diag = new Matrix(new double[] {
                1.0D, 0.0D, 0.0D,
                0.0D, 1.0D, 0.0D,
                0.0D, 0.0D, 0.0D
            });


            foreach (var stereoPair in pairs)
            {
                var            points_count = stereoPair.intersection.Count;
                VectorOfPointF punten1px, punten2px;
                {
                    int           i  = 0;
                    List <PointF> p1 = new List <PointF>();
                    List <PointF> p2 = new List <PointF>();
                    foreach (KeyValuePair <ArucoMarker, ArucoMarker> kvp in stereoPair.intersection)
                    {
                        p1.Add(kvp.Key.Corner1);
                        p2.Add(kvp.Value.Corner1);
                        i++;
                    }
                    punten1px = new VectorOfPointF(p1.ToArray());
                    punten2px = new VectorOfPointF(p2.ToArray());
                }


                Matrix F      = new Matrix(3, 3);
                CVI.FindFundamentalMat(punten1px, punten2px, F);



                // Essential matrix from the fundamental matrix, then an SVD to recover the four
                // candidate relative poses (R1|T1), (R1|T2), (R2|T1), (R2|T2).
                Matrix essential = K.Transpose() * F * K;
                var    decomp = new SVD <double>(essential);
                var    U      = decomp.U;
                var    Vt     = decomp.Vt;

                var R1 = U * W * Vt;
                var R2 = U * W.Transpose() * Vt;
                var T1 = U.GetCol(2);
                var T2 = -1 * U.GetCol(2);

                Matrix[] Ps = new Matrix[4];

                for (int i = 0; i < 4; i++)
                {
                    Ps[i] = new Matrix(3, 4);
                }

                CVI.HConcat(R1, T1, Ps[0]);
                CVI.HConcat(R1, T2, Ps[1]);
                CVI.HConcat(R2, T1, Ps[2]);
                CVI.HConcat(R2, T2, Ps[3]);

                var KPs = new Matrix[4];
                KPs[0] = K * Ps[0];
                KPs[1] = K * Ps[1];
                KPs[2] = K * Ps[2];
                KPs[3] = K * Ps[3];


                var KP0 = K * new Matrix(new double [, ] {
                    { 1, 0, 0, 0 }, { 0, 1, 0, 0 }, { 0, 0, 1, 0 }
                });

                for (int i = 0; i < 4; i++)
                {
                    Matrix <float>     output_hom = new Matrix <float>(4, punten1px.Size);
                    VectorOfPoint3D32F output_3d  = new VectorOfPoint3D32F();

                    CVI.TriangulatePoints(KP0, KPs[i], punten1px, punten2px, output_hom);
                    CVI.ConvertPointsFromHomogeneous(output_hom, output_3d);
                }

                Matrix S = U * diag * W * U.Transpose();

                Matrix R = U * W * decomp.Vt;
            }
        }
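The loop above triangulates with all four candidate projection matrices but never chooses between them. The usual selection rule is the cheirality test: keep the candidate (R, T) for which the triangulated points lie in front of both cameras. Below is a minimal sketch of a hypothetical helper for the first half of that test, assuming output_3d has been converted with ToArray() and that the first camera is the identity camera KP0, so "in front" means a positive Z coordinate; the full test would also map each point into the second camera via X' = R*X + T and require a positive Z there as well.

        // Hypothetical helper for the cheirality test described above: inside the candidate loop
        // one would call CountPointsInFront(output_3d.ToArray()) and keep the candidate i with the
        // largest count.
        private static int CountPointsInFront(MCvPoint3D32f[] triangulated)
        {
            int inFront = 0;
            foreach (var p in triangulated)
            {
                if (p.Z > 0)
                {
                    inFront++;    // the point lies in front of the identity camera
                }
            }
            return inFront;
        }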
Example #12
 /// <summary>
 /// Set the value of the quaternions using euler angle
 /// </summary>
 /// <param name="x">Rotation around x-axis (roll) in radian</param>
 /// <param name="y">Rotation around y-axis (pitch) in radian</param>
 /// <param name="z">rotation around z-axis (yaw) in radian</param>
 public void SetEuler(double x, double y, double z)
 {
     CvInvoke.eulerToQuaternions(x, y, z, ref this);
 }
Example #13
 /// <summary>
 /// moves iterator to the next node
 /// </summary>
 public void Next()
 {
     CvInvoke.cveFileNodeIteratorNext(_ptr);
 }
Example #14
 /// <summary>
 /// Check if the current iterator equals the other.
 /// </summary>
 /// <param name="iterator">The other iterator to compare with.</param>
 /// <returns>True if the current iterator equals the other</returns>
 public bool Equals(FileNodeIterator iterator)
 {
     return CvInvoke.cveFileNodeIteratorEqualTo(_ptr, iterator);
 }
Example #15
 /// <summary>
 /// Create a FileNodeIterator from a specific node.
 /// </summary>
 /// <param name="node">the collection to iterate over</param>
 /// <param name="seekEnd">True if iterator needs to be set after the last element of the node</param>
 public FileNodeIterator(FileNode node, bool seekEnd)
 {
     _ptr = CvInvoke.cveFileNodeIteratorCreateFromNode(node, seekEnd);
 }
Example #16
 /// <summary>
 /// Create a blank file node iterator
 /// </summary>
 internal FileNodeIterator()
 {
     _ptr = CvInvoke.cveFileNodeIteratorCreate();
 }
Example #17
 /// <summary>
 /// Computes a background image.
 /// </summary>
 /// <param name="backgroundImage">The output background image</param>
 /// <param name="subtractor">The background subtractor</param>
 /// <remarks>Sometimes the background image can be very blurry, as it contains the average background statistics.</remarks>
 public static void GetBackgroundImage(this IBackgroundSubtractor subtractor, IOutputArray backgroundImage)
 {
     using (OutputArray oaBackgroundImage = backgroundImage.GetOutputArray())
         CvInvoke.cveBackgroundSubtractorGetBackgroundImage(subtractor.BackgroundSubtractorPtr, oaBackgroundImage);
 }
Example #18
 /// <summary>
 /// Update the background model
 /// </summary>
 /// <param name="image">The image that is used to update the background model</param>
 /// <param name="learningRate">Use -1 for default</param>
 /// <param name="subtractor">The background subtractor</param>
 /// <param name="fgMask">The output foreground mask</param>
 public static void Apply(this IBackgroundSubtractor subtractor, IInputArray image, IOutputArray fgMask, double learningRate = -1)
 {
     using (InputArray iaImage = image.GetInputArray())
         using (OutputArray oaFgMask = fgMask.GetOutputArray())
             CvInvoke.cveBackgroundSubtractorUpdate(subtractor.BackgroundSubtractorPtr, iaImage, oaFgMask, learningRate);
 }
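A minimal usage sketch for the two extension methods above (assumed calling code): a MOG2 subtractor is updated frame by frame from a video file and the accumulated background is read back at the end. The video file name is a placeholder.

 public static void BackgroundSubtractionDemo()
 {
     using (VideoCapture capture = new VideoCapture("input.avi"))
     using (BackgroundSubtractorMOG2 subtractor = new BackgroundSubtractorMOG2())
     using (Mat frame = new Mat())
     using (Mat fgMask = new Mat())
     using (Mat background = new Mat())
     {
         while (capture.Read(frame) && !frame.IsEmpty)
         {
             subtractor.Apply(frame, fgMask);          // update the model and get the foreground mask
         }
         subtractor.GetBackgroundImage(background);    // the average background accumulated so far
     }
 }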
Example #19
 /// <summary>
 /// Translate the Affine3 matrix by the given value
 /// </summary>
 /// <param name="t0">Value of the translation vector</param>
 /// <param name="t1">Value of the translation vector</param>
 /// <param name="t2">Value of the translation vector</param>
 /// <returns>The translated Affine3 matrix</returns>
 public Affine3d Translate(double t0, double t1, double t2)
 {
     return(new Affine3d(CvInvoke.cveAffine3dTranslate(_ptr, t0, t1, t2)));
 }
Example #20
 /// <summary>
 /// Fill the (3x3) rotation matrix with values such that it represents this quaternion
 /// </summary>
 /// <param name="rotation">The (3x3) rotation matrix whose values will be set to represent this quaternion</param>
 public void GetRotationMatrix(Matrix <double> rotation)
 {
     CvInvoke.quaternionsToRotationMatrix(ref this, rotation);
 }
Example #21
 /// <summary>
 /// Rotate the points in <paramref name="pointsSrc"/> and save the result in <paramref name="pointsDst"/>. Inplace operation is supported (<paramref name="pointsSrc"/> == <paramref name="pointsDst"/>).
 /// </summary>
 /// <param name="pointsSrc">The points to be rotated</param>
 /// <param name="pointsDst">The result of the rotation, should be the same size as <paramref name="pointsSrc"/>, can be <paramref name="pointsSrc"/> as well for inplace rotation</param>
 public void RotatePoints(Matrix <double> pointsSrc, Matrix <double> pointsDst)
 {
     CvInvoke.quaternionsRotatePoints(ref this, pointsSrc, pointsDst);
 }
Example #22
 /// <summary>
 /// Get the currently observed element
 /// </summary>
 /// <returns>The currently observed element</returns>
 public FileNode GetFileNode()
 {
     return new FileNode(CvInvoke.cveFileNodeIteratorGetFileNode(_ptr));
 }
Example #23
 /// <summary>
 /// Get the equivalent Euler angles
 /// </summary>
 /// <param name="x">Rotation around x-axis (roll) in radian</param>
 /// <param name="y">Rotation around y-axis (pitch) in radian</param>
 /// <param name="z">rotation around z-axis (yaw) in radian</param>
 public void GetEuler(ref double x, ref double y, ref double z)
 {
     CvInvoke.quaternionsToEuler(ref this, ref x, ref y, ref z);
 }
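A minimal sketch tying the quaternion methods above together (assumed calling code, using Emgu's Quaternions struct and Matrix<double>): set the quaternion from Euler angles, read back the equivalent rotation matrix, and recover the angles again.

 public static void QuaternionsDemo()
 {
     Quaternions q = new Quaternions();
     q.SetEuler(0.1, 0.2, 0.3);                       // roll, pitch, yaw in radians

     using (Matrix<double> rotation = new Matrix<double>(3, 3))
     {
         q.GetRotationMatrix(rotation);               // 3x3 rotation matrix equivalent to q
     }

     double roll = 0, pitch = 0, yaw = 0;
     q.GetEuler(ref roll, ref pitch, ref yaw);        // gives back approximately (0.1, 0.2, 0.3)
 }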
Example #24
 /// <summary>
 /// Release the unmanaged resources
 /// </summary>
 protected override void DisposeObject()
 {
     if (_ptr != IntPtr.Zero)
         CvInvoke.cveFileNodeIteratorRelease(ref _ptr);
 }
Example #25
 /// <summary>
 /// Create an empty Affine3 (double-precision) matrix
 /// </summary>
 public Affine3d()
 {
     _ptr = CvInvoke.cveAffine3dCreate();
 }
Example #26
 /// <summary>
 /// Creates MergeDebevec object.
 /// </summary>
 public MergeDebevec()
 {
     _ptr = CvInvoke.cveMergeDebevecCreate(ref _mergeExposuresPtr, ref _sharedPtr);
 }
 /// <summary>
 /// Create a spill tree from the specific feature descriptors
 /// </summary>
 /// <param name="descriptors">The array of feature descriptors</param>
 /// <param name="naive">A good value is 50</param>
 /// <param name="rho">A good value is .7</param>
 /// <param name="tau">A good value is .1</param>
 public FeatureTree(Matrix <float> descriptors, int naive, double rho, double tau)
 {
     _descriptorMatrix = descriptors.Clone();
     _ptr = CvInvoke.cvCreateSpillTree(_descriptorMatrix.Ptr, naive, rho, tau);
 }
Example #28
 /// <summary>
 /// Creates MergeMertens object.
 /// </summary>
 /// <param name="contrastWeight">contrast measure weight.</param>
 /// <param name="saturationWeight">saturation measure weight</param>
 /// <param name="exposureWeight">well-exposedness measure weight</param>
 public MergeMertens(float contrastWeight = 1.0f, float saturationWeight = 1.0f, float exposureWeight = 0.0f)
 {
     _ptr = CvInvoke.cveMergeMertensCreate(contrastWeight, saturationWeight, exposureWeight, ref _mergeExposuresPtr, ref _sharedPtr);
 }
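A minimal usage sketch for MergeMertens (assumed calling code): Mertens exposure fusion blends a bracketed exposure stack directly into a displayable image, without exposure times or a camera response curve. The file names are placeholders, and the Process(src, dst) overload is assumed to be the one Emgu exposes for this class.

 public static void ExposureFusionDemo()
 {
     using (Mat under  = CvInvoke.Imread("exposure_low.jpg", ImreadModes.AnyColor))
     using (Mat normal = CvInvoke.Imread("exposure_mid.jpg", ImreadModes.AnyColor))
     using (Mat over   = CvInvoke.Imread("exposure_high.jpg", ImreadModes.AnyColor))
     using (VectorOfMat stack = new VectorOfMat(under, normal, over))
     using (MergeMertens merge = new MergeMertens())
     using (Mat fused = new Mat())
     using (Mat fused8U = new Mat())
     {
         merge.Process(stack, fused);                     // result is 32-bit float in [0, 1]
         fused.ConvertTo(fused8U, DepthType.Cv8U, 255.0); // scale to 8-bit for saving
         CvInvoke.Imwrite("fused.jpg", fused8U);
     }
 }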
Example #29
 /// <summary>
 /// Creates MergeRobertson object.
 /// </summary>
 public MergeRobertson()
 {
     _ptr = CvInvoke.cveMergeRobertsonCreate(ref _mergeExposuresPtr, ref _sharedPtr);
 }
Example #30
 /// <summary>
 /// Create a new identity matrix
 /// </summary>
 /// <returns>The identity affine 3d matrix</returns>
 public static Affine3d Identity()
 {
     return(new Affine3d(CvInvoke.cveAffine3dGetIdentity()));
 }