Example #1
        private void rotatePhotos(object parameters)
        {
            object[] paramsArray = (object[])parameters;
            List<string> fileNames = (List<string>)paramsArray[0];
            PointF rotationCenter = (PointF)paramsArray[1];
            Bitmap referencePic = new Bitmap(fileNames.First());
            Image<Bgr, Byte> referenceImage = new Image<Bgr, Byte>(referencePic);

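            // EXIF tag 36867 (0x9003) is DateTimeOriginal, stored as "yyyy:MM:dd HH:mm:ss".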
            byte[] timeTakenRaw = referencePic.GetPropertyItem(36867).Value;
            string timeTaken = System.Text.Encoding.ASCII.GetString(timeTakenRaw, 0, timeTakenRaw.Length - 1);
            DateTime referenceTime = DateTime.ParseExact(timeTaken, "yyyy:MM:d H:m:s", System.Globalization.CultureInfo.InvariantCulture);

            referencePic.Dispose();

            Bgr background = new Bgr(0, 0, 0);

            foreach (string filename in fileNames)
            {
                Bitmap currentPic = new Bitmap(filename);
                timeTakenRaw = currentPic.GetPropertyItem(36867).Value;
                timeTaken = System.Text.Encoding.ASCII.GetString(timeTakenRaw, 0, timeTakenRaw.Length - 1);
                DateTime date = DateTime.ParseExact(timeTaken, "yyyy:MM:d H:m:s", System.Globalization.CultureInfo.InvariantCulture);
                double secondsShift = (date - referenceTime).TotalSeconds;
                double rotationAngle = secondsShift / stellarDay * 360;
                RotationMatrix2D<double> rotationMatrix = new RotationMatrix2D<double>(rotationCenter, -rotationAngle, 1);

                using (Image<Bgr, Byte> rotatedImage = new Image<Bgr, Byte>(currentPic))
                {
                    referenceImage = referenceImage.Max(rotatedImage.WarpAffine<double>(rotationMatrix, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC, Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, background));
                }
                pictureProcessed(this, new PictureProcessedEventArgs(referenceImage));
                currentPic.Dispose();
            }
            pictureProcessingComplete(this, new EventArgs());
        }
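A minimal sketch of how this handler might be invoked (the names outside the snippet are assumptions, not part of the original): the object[] layout matches the unpacking at the top of rotatePhotos, and the signature fits a ParameterizedThreadStart.

        // Hypothetical caller: stack the frames on a background thread.
        // fileNames and the rotation center are assumed to be prepared elsewhere.
        var worker = new System.Threading.Thread(rotatePhotos);
        worker.Start(new object[] { fileNames, new PointF(1024f, 768f) });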
Example #2
        static Image <Rgb, byte> deskew(Image <Rgb, byte> image)
        {
            Image <Gray, byte> image2 = image.Convert <Gray, byte>();

            //List<LineSegment2D> lines = getLines(image2, 200);
            //image = image2.Convert<Rgb, byte>();
            //foreach (var l in lines)
            //    image.Draw(l, new Rgb(255, 0, 0), 1);
            //return image;

            double angle;

            //angle = getDeskewAngleByLines(image2);
            //if (angle < -360)
            angle = getDeskewAngleByLongestBlock(image2);

            if (angle == 0)
            {
                return(image);
            }
            RotationMatrix2D rotationMat = new RotationMatrix2D();

            CvInvoke.GetRotationMatrix2D(new PointF((float)image.Width / 2, (float)image.Height / 2), angle, 1, rotationMat);
            Image <Rgb, byte> image3 = new Image <Rgb, byte>(image.Size);

            CvInvoke.WarpAffine(image, image3, rotationMat, image.Size);
            return(image3);
        }
Example #3
        private void FrmGeometricRotate_PassValuesEvent(object sender, FunctionArgs.GeometricRotateArgs e)
        {
            RotationMatrix2D matrix = new RotationMatrix2D();

            CvInvoke.GetRotationMatrix2D(new PointF(e.RotateX, e.RotateY), e.RotateAngle, e.RotateScale, matrix);
            CvInvoke.WarpAffine(mCurrentImage, mTempImage, matrix, new Size(mCurrentImage.Width, mCurrentImage.Height));
            mFrmMainImage.SetImageSource(mTempImage);
        }
Example #4
        public Mat RotateMat(Mat source, double angle)
        {
            PointF src_center = new PointF(source.Cols / 2.0F, source.Rows / 2.0F);
            Mat    rot_mat    = new RotationMatrix2D(src_center, angle, 1.0);
            Mat    dst        = new Mat();

            CvInvoke.WarpAffine(source, dst, rot_mat, source.Size);
            return(dst);
        }
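A minimal usage sketch (assuming Emgu CV 3.x/4.x, where CvInvoke.Imread, CvInvoke.Imwrite and the ImreadModes enum are available; the file names are placeholders):

            // Load an image, rotate it 15 degrees counter-clockwise about its center, and save the result.
            Mat source  = CvInvoke.Imread("input.jpg", Emgu.CV.CvEnum.ImreadModes.Color);
            Mat rotated = RotateMat(source, 15.0);
            CvInvoke.Imwrite("rotated.jpg", rotated);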
Example #5
        public static Mat RotateImage(Mat src, double angle)
        {
            //var src = CvInvoke.Imread(file);
            Mat dst = new Mat();
            var rot = new RotationMatrix2D(new PointF(src.Width / 2.0f, src.Height / 2.0f), angle, 1);

            CvInvoke.WarpAffine(src, dst, rot, src.Size);
            return(dst);
        }
Example #6
        static Image <Rgb, byte> deskew(Image <Rgb, byte> image) //good
        {                                                        //https://becominghuman.ai/how-to-automatically-deskew-straighten-a-text-image-using-opencv-a0c30aed83df
            Image <Gray, byte> image2 = image.Convert <Gray, byte>();

            CvInvoke.BitwiseNot(image2, image2);                      //to negative
            CvInvoke.GaussianBlur(image2, image2, new Size(9, 9), 0); //remove small spots
            CvInvoke.Threshold(image2, image2, 125, 255, ThresholdType.Otsu | ThresholdType.Binary);
            Mat se = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(30, 5), new Point(-1, -1));

            CvInvoke.Dilate(image2, image2, se, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
            //Emgu.CV.CvInvoke.Erode(image, image, null, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();

            CvInvoke.FindContours(image2, contours, hierarchy, RetrType.External, ChainApproxMethod.ChainApproxSimple);
            if (contours.Size < 1)
            {
                return(null);
            }
            int    maxW  = 0;
            double angle = 0;

            for (int i = 0; i < contours.Size; i++)
            {
                RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
                Rectangle   r  = rr.MinAreaRect();
                int         w  = r.Width > r.Height ? r.Width : r.Height;
                if (maxW < w)
                {
                    maxW  = w;
                    angle = rr.Angle;
                }
            }
            if (angle > 45)
            {
                angle -= 90;
            }
            else if (angle < -45)
            {
                angle += 90;
            }
            RotationMatrix2D rotationMat = new RotationMatrix2D();

            CvInvoke.GetRotationMatrix2D(new PointF((float)image.Width / 2, (float)image.Height / 2), angle, 1, rotationMat);
            Image <Rgb, byte> image3 = new Image <Rgb, byte>(image.Size);

            CvInvoke.WarpAffine(image, image3, rotationMat, image.Size);
            return(image3);
        }
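A minimal sketch of calling this deskew routine (file names are placeholders; an Image<Rgb, byte> can be constructed directly from a file path in Emgu CV):

            using (Image <Rgb, byte> scan = new Image <Rgb, byte>("scan.png"))
            {
                Image <Rgb, byte> straightened = deskew(scan);
                if (straightened != null)
                {
                    straightened.Save("scan_deskewed.png");
                }
            }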
Example #7
        private void deSkewBtn_Click(object sender, EventArgs e)
        {
            double phi1 = Math.Atan((pageMarks[1].Y - pageMarks[0].Y) / (pageMarks[1].X - pageMarks[0].X));

            Console.WriteLine("Deskew angle phi: " + phi1);
            double           phi2   = Math.Atan((pageMarks[2].Y - pageMarks[3].Y) / (pageMarks[2].X - pageMarks[3].X));
            double           phi    = (phi1 + phi2) / 2 * 180.0 / Math.PI; // Math.Atan returns radians; GetRotationMatrix2D expects degrees
            RotationMatrix2D rotMat = new RotationMatrix2D();

            CvInvoke.GetRotationMatrix2D(pageMarks[0], phi, 1, rotMat);
            CvInvoke.WarpAffine(thresholded, thresholded, rotMat, thresholded.Size);
            CvInvoke.WarpAffine(image, image, rotMat, thresholded.Size);
            CvInvoke.WarpAffine(colorImage, colorImage, rotMat, thresholded.Size);
            CvInvoke.WarpAffine(wthreshold, wthreshold, rotMat, thresholded.Size);
            cropImage();
        }
Example #8
 static Image<Gray, byte> ExtractStikyNoteRegion(Image<Gray, byte> inputImg,float markerPosX,float markerPosY,float markerOrientation)
 {
     Image<Gray, byte> toBeModified = inputImg;
     //inputImg.Draw(new CircleF(new PointF(markerPosX, markerPosY), 2), new Bgr(0, 0, 255), 2);
     //inputImg.Save("Raw.bmp");
     //CvInvoke.cvShowImage("Source", inputImg);
     //angle here in counter-clockwise direction
     float angleInDegree = Utilities.RadToDegree(markerOrientation);
     //toBeModified = toBeModified.Rotate(angleInDegree + 90, new Bgr(255, 255, 255));
     RotationMatrix2D<float> rotMat = new RotationMatrix2D<float>(new PointF(markerPosX, markerPosY), angleInDegree - 90, 1);
     toBeModified = toBeModified.WarpAffine<float>(rotMat, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC, Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS, new Gray(255));
        /* PointF rotOrigin = new PointF(inputImg.Width / 2, inputImg.Height / 2);
     PointF rotatedPos = Utilities.rotatePoint(new PointF(markerPosX,markerPosY),
                         markerOrientation + Utilities.DegreeToRad(90), rotOrigin);
     toBeModified.ROI = new Rectangle((int)rotatedPos.X - MARKER_SIZE / 4, (int)rotatedPos.Y - MARKER_SIZE / 4,
                         NOTE_SIZE - MARKER_SIZE / 2, NOTE_SIZE - MARKER_SIZE / 2);*/
     toBeModified.ROI = new Rectangle((int)markerPosX - MARKER_SIZE / 4, (int)markerPosY - MARKER_SIZE / 4,
                         NOTE_SIZE - MARKER_SIZE / 2, NOTE_SIZE - MARKER_SIZE / 2);
     return toBeModified.Copy();
 }
Example #9
        public Image <Gray, byte> Normalize(Image <Bgr, byte> originalImage, int targetWidth = 512, int targetHeight = 512)
        {
            var eyes = eyesDector.DetectEyes(originalImage);
            //DrawEyesRect(originalImage, eyes);
            // detect angle respect to horizontal line of eyes
            var dy            = eyes.Right.GetCenter().Y - eyes.Left.GetCenter().Y;
            var dx            = eyes.Right.GetCenter().X - eyes.Left.GetCenter().X;
            var rotationAngle = RadToDegree(Math.Atan2(dy, dx)); // angle
            var eyesCenterX   = (eyes.Left.GetCenter().X + eyes.Right.GetCenter().X) / 2;
            var eyesCenterY   = (eyes.Left.GetCenter().Y + eyes.Right.GetCenter().Y) / 2;

            // determine the right target scale
            var dist        = Math.Sqrt(dx * dx + dy * dy);
            var targetDist  = (1.0 - 2 * DESIDERED_LEFT_EYE_X) * targetWidth;
            var targetScale = targetDist / dist;

            // determine the translation in order to center eyes into cropped image
            var tx = (targetWidth * 0.5) - eyesCenterX;
            var ty = (targetHeight * DESIDERED_LEFT_EYE_Y) - eyesCenterY;

            // create affine matrix
            using (var rotationMatrix = new RotationMatrix2D(new PointF(eyesCenterX, eyesCenterY), rotationAngle, targetScale))
                using (var affineMatrix = new Matrix <double>(rotationMatrix.Rows, rotationMatrix.Cols, rotationMatrix.DataPointer))
                {
                    // add the translation component to the rotation matrix
                    affineMatrix.SetCellValue(0, 2, affineMatrix.GetCellValue(0, 2) + tx);
                    affineMatrix.SetCellValue(1, 2, affineMatrix.GetCellValue(1, 2) + ty);

                    var resized = originalImage.WarpAffine(affineMatrix.Mat,
                                                           targetWidth,
                                                           targetHeight,
                                                           Inter.Area,
                                                           Warp.Default,
                                                           BorderType.Constant,
                                                           new Bgr()
                                                           );

                    //ImageViewer.Show(resized);
                    return(resized.Convert <Gray, byte>());
                }
        }
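A minimal usage sketch (the normalizer variable stands in for an instance of the hosting class, which is not shown in the snippet; file names are placeholders):

            // Align a face photo so the eyes sit on a horizontal line, then save the 512x512 crop.
            using (var face = new Image <Bgr, byte>("face.jpg"))
            using (var aligned = normalizer.Normalize(face))
            {
                aligned.Save("face_aligned.png");
            }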
Example #10
        /// <summary>
        /// Rotate Image
        /// </summary>
        /// <param name="angle">Degrees Of Rotation</param>
        public Mat GetRotatedImage(double angle)
        {
            if (angle < 0 || angle >= 360)
            {
                throw new ArgumentException("angle");
            }

            if (_rotatedImage.image == null || _rotatedImage.angle != angle)
            {
                if (_rotatedImage.image != null)
                {
                    _rotatedImage.image.Dispose();
                    _rotatedImage.image = null;
                }

                if (angle == 0)
                {
                    _rotatedImage.image = _image.Clone();
                    _rotatedImage.angle = angle;
                }
                else
                {
                    PointF src_center = new PointF(_image.Cols / 2.0F, _image.Rows / 2.0F);

                    using (Mat rot_mat = new RotationMatrix2D(src_center, angle, 1.0))
                    {
                        using (Mat destinationImage = new Mat())
                        {
                            CvInvoke.WarpAffine(_image, destinationImage, rot_mat, _image.Size, interpMethod: Inter.Cubic);

                            _rotatedImage.image = destinationImage.Clone();
                            _rotatedImage.angle = angle;
                        }
                    }
                }
            }

            return(_rotatedImage.image.Clone());
        }
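The method returns a clone, so the caller owns (and should dispose) the returned Mat. A minimal sketch, with holder standing in for an instance of the hosting class:

            using (Mat rotated = holder.GetRotatedImage(90.0))
            {
                CvInvoke.Imwrite("rotated.png", rotated);
            }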
Example #11
        //! Determines the ROI and rotates the marker
        /*! The function's operation can be described in the following steps:
         *  1. Cut out from the image the ROI bounded by the pictogram's outer frame
         *  2. Get the marker's rotation angle - NOTE: the angle lies in the range +-[0-90]
         *  3. Get the coordinates of the marker's center and of the rectangle's center.
         *  4. Rotate the marker to one of the four base positions
         *  5. Compute d, which determines by how much the ROI has to be shrunk to remove
         *     the area between the inner frame and the image
         *  6. Add the marker to the list, storing the contours of the outer and inner frames
         *  7. Compute, from the contour, the marker's rotation angle to the base position.
        */
        public void rot4MarkerAndCreateMarkerElem(int index)
        {
            Image<Gray, Byte> roi = grayImage.Copy(
                markers[index].getIncludeSquere().getContour().BoundingRectangle);


            PointF center = markers[index].getIncludeSquere().getCenter();
            PointF centerTopRect = markers[index].getRectangle().getCenter();

            double angle = calculateAngle360(center, centerTopRect);
            RotationMatrix2D<float> rotateBasePos = new RotationMatrix2D<float>(
                new PointF(roi.Width / 2, roi.Height / 2), angle, 1.1);
            double d = Math.Sqrt(markers[index].getIncludeSquere().getArea());
            double x = 0.125 * d;
            Image<Gray, Byte> rot = roi.WarpAffine(rotateBasePos,
                                     Emgu.CV.CvEnum.INTER.CV_INTER_AREA,
                                     Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS
                                     , new Gray(0));
            Rectangle rectInternal = new Rectangle(rot.Width / 2 - (int)((d / 2) - x),
                          rot.Height / 2 - (int)((d / 2) - x), (int)(d - 2 * x), (int)(d - 2 * x));

            markers[index].setSymbolImage(rot.Copy(rectInternal));
            markers[index].setRotateAngle(angle);
        }
Example #12
 /// <summary>
 /// Generate a random point cloud around the ellipse. 
 /// </summary>
 /// <param name="e">The region where the point cloud will be generated. The axes of e corresponds to std of the random point cloud.</param>
 /// <param name="numberOfPoints">The number of points to be generated</param>
 /// <returns>A random point cloud around the ellipse</returns>
 public static PointF[] GeneratePointCloud(Ellipse e, int numberOfPoints)
 {
    PointF[] cloud = new PointF[numberOfPoints];
    GCHandle handle = GCHandle.Alloc(cloud, GCHandleType.Pinned);
    using (Matrix<float> points = new Matrix<float>(numberOfPoints, 2, handle.AddrOfPinnedObject()))
    using (Matrix<float> xValues = points.GetCol(0))
    using (Matrix<float> yValues = points.GetCol(1))
    using (RotationMatrix2D<float> rotation = new RotationMatrix2D<float>(e.MCvBox2D.center, e.MCvBox2D.angle, 1.0))
    {
       xValues.SetRandNormal(new MCvScalar(e.MCvBox2D.center.X), new MCvScalar(e.MCvBox2D.size.Width / 2.0f));
       yValues.SetRandNormal(new MCvScalar(e.MCvBox2D.center.Y), new MCvScalar(e.MCvBox2D.size.Height / 2.0f));
       rotation.RotatePoints(points);
    }
    handle.Free();
    return cloud;
 }
Example #13
        public void rot4MarkerAndCreateMarkerElem(int indexPM)
        {
            Image<Gray, Byte> roi = grayImage.Copy(
                contourPassibleMarkers.getContourAt(
                    possibleMarkers.getNumberInternalFrameAt(indexPM)).BoundingRectangle);
            double angle = contourPassibleMarkers.getAngleAt(
                possibleMarkers.getNumberInternalFrameAt(indexPM));

            listPkt.Clear();
            PointF center = contourPassibleMarkers.getCenterAt(
                possibleMarkers.getNumberBaseAt(indexPM));
            PointF centerTopRect = contourPassibleMarkers.getCenterAt(
                possibleMarkers.getNumberTopRectAt(indexPM));
            RotationMatrix2D<float> rotateBasePos = new RotationMatrix2D<float>(
                new PointF(roi.Width / 2, roi.Height / 2), angle, 1);
            double d = Math.Sqrt(contourPassibleMarkers.getContourAreaAt(indexPM));
            double x = 0.125 * d;
            Image<Gray, Byte> rot = roi.WarpAffine(rotateBasePos,
                                     Emgu.CV.CvEnum.INTER.CV_INTER_AREA,
                                     Emgu.CV.CvEnum.WARP.CV_WARP_FILL_OUTLIERS,
                                     new Gray(0));
            Rectangle rectInternal = new Rectangle(rot.Width / 2 - (int)((d / 2) - x),
                      rot.Height / 2 - (int)((d / 2) - x), (int)(d - 2 * x), (int)(d - 2 * x));

            markers.Add(new Marker(
                contourPassibleMarkers.getContourAt(possibleMarkers.getNumberInternalFrameAt(indexPM)),
                contourPassibleMarkers.getContourAt(possibleMarkers.getNumberBaseAt(indexPM)),
                rot.Copy(rectInternal)));
            markers.Last().angle = calculateAngle(center, centerTopRect, angle);
        }
Example #14
      /*
      /// <summary>
      /// Re-project pixels on a 1-channel disparity map to array of 3D points.
      /// </summary>
      /// <param name="disparity">Disparity map</param>
      /// <param name="Q">The re-projection 4x4 matrix, can be arbitrary, e.g. the one, computed by cvStereoRectify</param>
      /// <returns>The reprojected 3D points</returns>
      public static MCvPoint3D32f[] ReprojectImageTo3D(Image<Gray, Byte> disparity, Matrix<double> Q)
      {
         Size size = disparity.Size;
         MCvPoint3D32f[] points3D = new MCvPoint3D32f[size.Width * size.Height];
         GCHandle handle = GCHandle.Alloc(points3D, GCHandleType.Pinned);

         using (Matrix<float> pts = new Matrix<float>(size.Height, size.Width, 3, handle.AddrOfPinnedObject(), 0))
            CvInvoke.ReprojectImageTo3D(disparity, pts, Q, false, CvEnum.DepthType.Cv32F);

         handle.Free();
         return points3D;
      }*/

      
      /// <summary>
      /// Generate a random point cloud around the ellipse. 
      /// </summary>
      /// <param name="e">The region where the point cloud will be generated. The axes of e corresponds to std of the random point cloud.</param>
      /// <param name="numberOfPoints">The number of points to be generated</param>
      /// <returns>A random point cloud around the ellipse</returns>
      public static PointF[] GeneratePointCloud(Ellipse e, int numberOfPoints)
      {
         PointF[] cloud = new PointF[numberOfPoints];
         GCHandle handle = GCHandle.Alloc(cloud, GCHandleType.Pinned);
         using (Matrix<float> points = new Matrix<float>(numberOfPoints, 2, handle.AddrOfPinnedObject()))
         using (Matrix<float> xValues = points.GetCol(0))
         using (Matrix<float> yValues = points.GetCol(1))
         using (RotationMatrix2D rotation = new RotationMatrix2D(e.RotatedRect.Center, e.RotatedRect.Angle, 1.0))
         using (Mat tmp = new Mat())
         {
            rotation.ConvertTo(tmp, DepthType.Cv32F);
            xValues.SetRandNormal(new MCvScalar(e.RotatedRect.Center.X), new MCvScalar(e.RotatedRect.Size.Width / 2.0f));
            yValues.SetRandNormal(new MCvScalar(e.RotatedRect.Center.Y), new MCvScalar(e.RotatedRect.Size.Height / 2.0f));
            rotation.RotatePoints(points);
         }
         handle.Free();
         return cloud;
      }
Example #15
        /// <summary>
        /// Determine if the contour represents a card.
        /// </summary>
        public bool TryFindCard(Guid cardId, Size fieldImageSize, VectorOfPoint countor, out Mat result)
        {
            result = null;

            RotatedRect rotatedRect = CvInvoke.MinAreaRect(countor);

            // Prevent Divide By Zero
            if (rotatedRect.Size.Width == 0)
            {
                return(false);
            }

            float angle           = 0F;
            float width           = rotatedRect.Size.Width;
            float height          = rotatedRect.Size.Height;
            float area            = width * height;
            float heightRatio     = rotatedRect.Size.Height / (float)fieldImageSize.Height;
            float widthRatio      = rotatedRect.Size.Width / (float)fieldImageSize.Width;
            float relativeCenterX = rotatedRect.Center.X / (float)fieldImageSize.Width;
            float relativeCenterY = rotatedRect.Center.Y / (float)fieldImageSize.Height;

            Rectangle box            = rotatedRect.MinAreaRect();
            float     boxAspectRatio = box.Size.Width > box.Size.Height ? (float)box.Size.Height / (float)box.Size.Width : (float)box.Size.Width / (float)box.Size.Height;

            // Prevent Divide By Zero
            if ((rotatedRect.Size.Height == 0) || (rotatedRect.Size.Width == 0))
            {
                return(false);
            }

            float aspectRatio = (float)width / (float)height;

            // Rotate card if it is on its side
            if (width > height)
            {
                aspectRatio = (float)height / (float)width;
                angle       = -90.0F;
            }

            // Card should have a height
            if (height < 1.0F)
            {
                return(false);
            }

            // Too small to parse
            if (height < 500.0F)
            {
                return(false);
            }

            // Too small to parse
            if (width < 500.0F)
            {
                return(false);
            }

            Debug.WriteLine("Potential Card Contour - Center: {0}/{1} Relative Center: ({9:0.00}%)/({10:0.00}%) Width: {2} ({11:0.00}%) Height: {3} ({12:0.00}%) Area: {4} : AspectRatio: {5}, Angle: {6} Image Size: {7}/{8}",
                            rotatedRect.Center.X, rotatedRect.Center.Y,
                            rotatedRect.Size.Width, rotatedRect.Size.Height,
                            area, aspectRatio, rotatedRect.Angle,
                            fieldImageSize.Width, fieldImageSize.Height,
                            relativeCenterX * 100.0F,
                            relativeCenterY * 100.0F,
                            widthRatio * 100.0F,
                            heightRatio * 100.0F);

            // Check the card aspect ratio
            if (aspectRatio < MinCardAspectRatio || aspectRatio > MaxCardAspectRatio)
            {
                return(false);
            }

            using (Mat image = GetImage())
            {
                using (Mat rot_mat = new RotationMatrix2D(rotatedRect.Center, rotatedRect.Angle + angle, 1.0))
                {
                    using (Mat rotated = new Mat())
                    {
                        // Rotate
                        CvInvoke.WarpAffine(image, rotated, rot_mat, image.Size, interpMethod: Inter.Cubic);

                        // Adjust For Rotation
                        Size size;
                        if (rotatedRect.Angle + angle < -90)
                        {
                            size = new Size((int)rotatedRect.Size.Height, (int)rotatedRect.Size.Width);
                        }
                        else
                        {
                            size = new Size((int)rotatedRect.Size.Width, (int)rotatedRect.Size.Height);
                        }

                        using (Mat cropped = new Mat())
                        {
                            CvInvoke.GetRectSubPix(rotated, size, rotatedRect.Center, cropped);
                            result = cropped.Clone();
                            return(true);
                        }
                    }
                }
            }
        }
Example #16
        private void CalculateTransformedBaseLines()
        {
            PointF center = new PointF(_inputImage.Width * 0.5f, _inputImage.Height * 0.5f);
            _rotationAngle = 180.0 - Math.Atan2((_baseLineStart.Y - _baseLineEnd.Y), (_baseLineStart.X - _baseLineEnd.X)) / Math.PI * 180;
            RotationMatrix2D<float> rotationMatrix = new RotationMatrix2D<float>(center, -_rotationAngle, 1);

            PointF[] p = new PointF[] { new PointF(_baseLineStart.X, _baseLineStart.Y) ,
                new PointF(BaseLineEnd.X, BaseLineEnd.Y)};
            rotationMatrix.RotatePoints(p);
            transformedBaseLineStart = new Point((int)p[0].X, (int)p[0].Y);
            transformedBaseLineEnd = new Point((int)p[1].X, (int)p[1].Y);
        }
Example #17
      public void TestRotationMatrix2D()
      {
         double angle = 32;
         Size size = new Size(960, 480);
         PointF center = new PointF(size.Width * 0.5f, size.Height * 0.5f);
         using (RotationMatrix2D rotationMatrix = new RotationMatrix2D(center, -angle, 1))
         {
            PointF[] corners = new PointF[] {
                  new PointF(0, 0),
                  new PointF(size.Width - 1, 0),
                  new PointF(size.Width - 1, size.Height - 1),
                  new PointF(0, size.Height - 1)};
            PointF[] oldCorners = new PointF[corners.Length];
            corners.CopyTo(oldCorners, 0);

            rotationMatrix.RotatePoints(corners);

            Mat transformation = CvInvoke.EstimateRigidTransform(oldCorners, corners, true);

            Matrix<double> delta = new Matrix<double>(transformation.Size);
            CvInvoke.AbsDiff(rotationMatrix, transformation, delta);
            double min = 0, max = 0;
            Point minLoc = new Point(), maxLoc = new Point();
            CvInvoke.MinMaxLoc(delta, ref min, ref max, ref minLoc, ref maxLoc, null);

            double min2, max2;
            int[] minLoc2 = new int[2], maxLoc2 = new int[2];
            CvInvoke.MinMaxIdx(delta, out min2, out max2, minLoc2, maxLoc2, null);
            EmguAssert.IsTrue(min == min2);
            EmguAssert.IsTrue(max == max2);
            EmguAssert.IsTrue(minLoc.X == minLoc2[1]);
            EmguAssert.IsTrue(minLoc.Y == minLoc2[0]);
            EmguAssert.IsTrue(maxLoc.X == maxLoc2[1]);
            EmguAssert.IsTrue(maxLoc.Y == maxLoc2[0]);

            EmguAssert.IsTrue(max < 1.0e-4, String.Format("Error {0} is too large. Expected to be less than 1.0e-4", max));
         }
      }
Example #18
        public void DeSkew()
        {
            Rectangle vBoundary = new Rectangle(new Point(0, 0), new Size(140, originalImage.Height));

            Emgu.CV.Cvb.CvBlobDetector bDetect    = new Emgu.CV.Cvb.CvBlobDetector();
            Emgu.CV.Cvb.CvBlobs        markerBlob = new Emgu.CV.Cvb.CvBlobs();

            List <Rectangle> blobs = new List <Rectangle>();

            Image <Gray, Byte> preprocessImage = originalImage.Convert <Gray, Byte>();

            preprocessImage = preprocessImage.ThresholdBinary(new Gray(200), new Gray(255));
            preprocessImage = preprocessImage.Not();

            markerBlob.Clear();

            bDetect.Detect(preprocessImage, markerBlob);
            preprocessImage.Dispose();
            preprocessImage = null;

            markerBlob.FilterByArea(250, 1800);

            foreach (Emgu.CV.Cvb.CvBlob targetBlob in markerBlob.Values)
            {
                if (vBoundary.Contains(targetBlob.BoundingBox))
                {
                    if (targetBlob.BoundingBox.Width >= targetBlob.BoundingBox.Height - 5)
                    {
                        Rectangle r = new Rectangle(targetBlob.BoundingBox.X, targetBlob.BoundingBox.Y, targetBlob.BoundingBox.Width, targetBlob.BoundingBox.Height);
                        blobs.Add(r);
                    }
                }
            }

            RectangleF temp  = blobs.First();
            RectangleF temp2 = blobs.Last();

            double dY = Math.Abs(temp.Y - temp2.Y);
            double dX = Math.Abs(temp.X - temp2.X);

            double angle = Math.Atan2(dX, dY);

            angle = angle * (180 / Math.PI);

            if (temp2.X > temp.X)
            {
                angle = angle * -1;
            }

            RotatedRect rot_rec = new RotatedRect();

            rot_rec.Center = new PointF(temp.X, temp.Y);
            RotationMatrix2D  rot_mat  = new RotationMatrix2D(rot_rec.Center, angle, 1);
            Image <Bgr, Byte> outimage = originalImage.CopyBlank();

            CvInvoke.WarpAffine(originalImage, outimage, rot_mat, originalImage.Size, Inter.Cubic, Warp.Default, BorderType.Constant, new Bgr(Color.White).MCvScalar);

            int xOffset = 80 - (int)temp.X;
            int yOffset = 45 - (int)temp.Y;

            originalImage = outimage.Copy();

            Bitmap     a      = originalImage.ToBitmap();
            CanvasMove filter = new CanvasMove(new AForge.IntPoint(xOffset, yOffset), Color.White);

            a             = filter.Apply(a);
            originalImage = new Image <Bgr, Byte>(a);

            a.Dispose();
            a = null;
            outimage.Dispose();
            outimage = null;
            blobs    = null;
        }
Example #19
 public void TestRotationMatrix2D()
 {
    RotationMatrix2D mat = new RotationMatrix2D(new PointF(1, 2), 30, 1);
    RotationMatrix2D mat2 = new RotationMatrix2D(new PointF(1, 2), 30, 1);
    //Trace.WriteLine(Emgu.Toolbox.MatrixToString<float>(mat.Data, ", ", ";\r\n"));
 }
Example #20
        public static String correctTiltImage64(String base64String)
        {
            if (String.IsNullOrWhiteSpace(base64String))
            {
                return(null);
            }
            // convert base64 to a System.Drawing image
            var img = StringToImage(base64String);

            // convert image to bitmap
            Bitmap bmp = new Bitmap(img);

            //convert image to emgu cv image
            Image <Bgr, Byte> My_Image = new Image <Bgr, byte>(bmp);

            // convert image to gray scale
            Image <Gray, Byte> result = new Image <Gray, byte>(My_Image.ToBitmap());
            UMat u = result.ToUMat();

            //apply canny edge detection
            CvInvoke.Canny(result, result, 150, 50, 3);
            double angle;

            // detect lines by applying the Hough line transform
            LineSegment2D[] lines;
            using (var vector = new VectorOfPointF())
            {
                CvInvoke.HoughLines(result, vector,
                                    1,
                                    Math.PI / 180,
                                    350);

                var   linesList = new List <LineSegment2D>();
                Point pt1       = new Point();
                Point pt2       = new Point();

                float theta = 0;

                for (var i = 0; i < vector.Size; i++)
                {
                    var rho = vector[i].X;
                    theta = vector[i].Y;
                    var a  = Math.Cos(theta);
                    var b  = Math.Sin(theta);
                    var x0 = a * rho;
                    var y0 = b * rho;
                    pt1.X = (int)Math.Round(x0 + 1000 * (-b));
                    pt1.Y = (int)Math.Round(y0 + 1000 * (a));
                    pt2.X = (int)Math.Round(x0 - 1000 * (-b));
                    pt2.Y = (int)Math.Round(y0 - 1000 * (a));

                    linesList.Add(new LineSegment2D(pt1, pt2));
                }
                if (theta == 0)
                {
                    int   angle90 = 90;
                    float height  = My_Image.Height;
                    float width   = My_Image.Width;
                    float cx      = width / 2f;
                    float cy      = height / 2f;

                    RotationMatrix2D rotationMatrix1 = new RotationMatrix2D(new PointF(My_Image.Width / 2f, My_Image.Height / 2f), angle90, 1);
                    Size             imgsize1        = My_Image.Size;
                    // rotate image by 270 degrees clockwise
                    img.RotateFlip(RotateFlipType.Rotate270FlipNone);
                    My_Image = new Image <Bgr, byte>(new Bitmap(img));
                }
                else
                {
                    //draw line to calculate tilt
                    //My_Image.Draw(new LineSegment2D(pt1, pt2), new Bgr(Color.Red), 2);
                    //reference horizontal line
                    //My_Image.Draw(new LineSegment2D(new Point(pt1.X, pt1.Y), new Point(pt2.X, pt1.Y)), new Bgr(Color.Green), 2);

                    //find slope and calculate angle
                    angle = slope(pt1, pt2) - slope(new Point(pt1.X, pt1.Y), new Point(pt2.X, pt1.Y));
                    angle = Math.Atan(angle);
                    angle = (180 / Math.PI) * angle;
                    //rotate counter-clockwise
                    RotationMatrix2D rotationMatrix = new RotationMatrix2D(new PointF(My_Image.Width / 2f, My_Image.Height / 2f), angle, 1);
                    Size             imgsize        = My_Image.Size;
                    CvInvoke.WarpAffine(My_Image, My_Image, rotationMatrix, imgsize, Inter.Cubic, Warp.Default, BorderType.Replicate, default(MCvScalar));
                }

                lines = linesList.ToArray();
            }

            //convert corrected image to base64 string
            var bytes = My_Image.ToJpegData(80);

            base64String = Convert.ToBase64String(bytes);



            return(base64String);
        }
Example #21
        static Image <Rgb, byte> deskew(Image <Rgb, byte> image, Size structuringElementSize, int contourMaxCount, double angleMaxDeviation, Size margin, Rgb marginRgb) //good
        {                                                                                                                                                                //https://becominghuman.ai/how-to-automatically-deskew-straighten-a-text-image-using-opencv-a0c30aed83df
            Image <Gray, byte> image2 = image.Convert <Gray, byte>();

            CvInvoke.BitwiseNot(image2, image2);//to negative
            //CvInvoke.GaussianBlur(image2, image2, new Size((int)(9f / Settings.Constants.Pdf2ImageResolutionRatio), (int)(9f / Settings.Constants.Pdf2ImageResolutionRatio)), 0);//remove small spots
            CvInvoke.Threshold(image2, image2, 125, 255, ThresholdType.Otsu | ThresholdType.Binary);
            Mat se = CvInvoke.GetStructuringElement(ElementShape.Rectangle, structuringElementSize, new Point(-1, -1));

            CvInvoke.Dilate(image2, image2, se, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);
            //Emgu.CV.CvInvoke.Erode(image, image, null, new Point(-1, -1), 1, BorderType.Constant, CvInvoke.MorphologyDefaultBorderValue);

            Image <Rgb, byte> image3 = new Image <Rgb, byte>(image.Width + 2 * margin.Width, image.Height + 2 * margin.Height, marginRgb);

            image3.ROI = new Rectangle(new Point(margin.Width, margin.Height), image.Size);
            image.CopyTo(image3);
            image3.ROI = Rectangle.Empty;

            VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();
            Mat hierarchy = new Mat();

            CvInvoke.FindContours(image2, contours, hierarchy, RetrType.List, ChainApproxMethod.ChainApproxSimple);
            if (contours.Size < 1)
            {
                return(null);
            }

            double angle = 0;
            //when contourMaxCount == 1, it just uses the longest block
            List <(double angle, int w)> cs = new List <(double angle, int w)>();

            for (int i = 0; i < contours.Size; i++)
            {
                RotatedRect rr = CvInvoke.MinAreaRect(contours[i]);
                Rectangle   r  = rr.MinAreaRect();
                int         w  = r.Width > r.Height ? r.Width : r.Height;
                double      a  = rr.Angle;
                if (a > 45)
                {
                    a -= 90;
                }
                else if (a < -45)
                {
                    a += 90;
                }
                cs.Add((angle: a, w: w));
            }
            cs = cs.OrderByDescending(a => a.w).Take(contourMaxCount).OrderBy(a => a.angle).ToList();
            if (cs.Count < 1)
            {
                angle = 0;
            }
            else if (cs.Count < 2)//use the longest block
            {
                angle = cs[0].angle;
            }
            else
            {
                List <List <int> > dss = new List <List <int> >();
                List <int>         ds  = new List <int>();
                for (int i = 1; i < cs.Count; i++)
                {
                    if (Math.Abs(cs[i].angle - cs[i - 1].angle) < angleMaxDeviation)
                    {
                        ds.Add(i);
                    }
                    else
                    {
                        dss.Add(ds);
                        ds = new List <int>();
                    }
                }
                dss.Add(ds);
                ds = dss.OrderByDescending(a => a.Count).FirstOrDefault();
                if (ds.Count < 1)
                {
                    angle = 0;
                }
                else
                {
                    // angle = as_[ds.OrderBy(a => Math.Abs(as_[a].angle - as_[a - 1].angle)).FirstOrDefault()].angle;
                    angle = (cs[ds[0] - 1].angle + ds.Sum(a => cs[a].angle)) / (1 + ds.Count);
                }
            }
            if (angle == 0)
            {
                return(image3);
            }

            RotationMatrix2D rotationMat = new RotationMatrix2D();

            CvInvoke.GetRotationMatrix2D(new PointF((float)image3.Width / 2, (float)image3.Height / 2), angle, 1, rotationMat);
            //image3.ROI = new Rectangle(new Point(offset.Width, offset.Height), image.Size);
            CvInvoke.WarpAffine(image3, image3, rotationMat, image3.Size, borderValue: marginRgb.MCvScalar);
            return(image3);
        }
Example #23
        /// <summary>
        /// Estimate rigid transformation between 2 point sets.
        /// </summary>
        /// <param name="src">The points from the source image</param>
        /// <param name="dest">The corresponding points from the destination image</param>
        /// <param name="fullAffine">Indicates if full affine should be performed</param>
        /// <returns>If success, the 2x3 rotation matrix that defines the Affine transform. Otherwise null is returned.</returns>
        public static RotationMatrix2D<double> EstimateRigidTransform(PointF[] src, PointF[] dest, bool fullAffine)
        {
            RotationMatrix2D<double> result = new RotationMatrix2D<double>();
            GCHandle handleA = GCHandle.Alloc(src, GCHandleType.Pinned);
            GCHandle handleB = GCHandle.Alloc(dest, GCHandleType.Pinned);
            bool success;
            using (Matrix<float> a = new Matrix<float>(src.Length, 1, 2, handleA.AddrOfPinnedObject(), 2 * sizeof(float)))
            using (Matrix<float> b = new Matrix<float>(dest.Length, 1, 2, handleB.AddrOfPinnedObject(), 2 * sizeof(float)))
            {
                success = CvInvoke.cvEstimateRigidTransform(a, b, result, fullAffine);
            }
            handleA.Free();
            handleB.Free();

            if (success)
            {
                return result;
            }
            else
            {
                result.Dispose();
                return null;
            }
        }
Example #24
        /// <summary>
        /// Calculates the matrix of an affine transform such that:
        /// (x'_i,y'_i)^T=map_matrix (x_i,y_i,1)^T
        /// where dst(i)=(x'_i,y'_i), src(i)=(x_i,y_i), i=0..2.
        /// </summary>
        /// <param name="src">Coordinates of 3 triangle vertices in the source image. If the array contains more than 3 points, only the first 3 will be used</param>
        /// <param name="dest">Coordinates of the 3 corresponding triangle vertices in the destination image. If the array contains more than 3 points, only the first 3 will be used</param>
        /// <returns>The 2x3 rotation matrix that defines the Affine transform</returns>
        public static RotationMatrix2D<double> GetAffineTransform(PointF[] src, PointF[] dest)
        {
            Debug.Assert(src.Length >= 3, "The source should contain at least 3 points");
            Debug.Assert(dest.Length >= 3, "The destination should contain at least 3 points");

            RotationMatrix2D<double> rot = new RotationMatrix2D<double>();
            CvInvoke.cvGetAffineTransform(src, dest, rot);
            return rot;
        }
Example #25
 public void TestRotationMatrix2D()
 {
     RotationMatrix2D <float>  mat  = new RotationMatrix2D <float>(new PointF(1, 2), 30, 1);
     RotationMatrix2D <double> mat2 = new RotationMatrix2D <double>(new PointF(1, 2), 30, 1);
     //Trace.WriteLine(Emgu.Toolbox.MatrixToString<float>(mat.Data, ", ", ";\r\n"));
 }