Exemplo n.º 1
0
    public static void UpdateLineMesh(OpticalFlowWorker.AsyncResult r, Mesh mesh, CvPoint2D32f[] velocities, float limitVelocity)
    {
        // Rebuild `mesh` as a set of line segments, one per tracked corner:
        // each segment runs from the corner's normalized position to that
        // position plus its optical-flow velocity, colored by flow direction.
        var vertexCount = r.nCorners * 2;
        var vertices = new Vector3[vertexCount];
        var colors = new Color[vertexCount];
        var indices = new int[vertexCount];
        var sqrLimit = limitVelocity * limitVelocity;
        var corners = r.corners0;
        var invTexel = new Vector2(1f / r.imageWidth, 1f / r.imageHeight);

        for (var i = 0; i < r.nCorners; i++) {
            var baseIndex = 2 * i;
            var corner = corners[i];
            // Map pixel coordinates into a [-0.5, 0.5] quad; Y is flipped for
            // the origin (image Y grows downward).
            var origin = new Vector3(corner.X * invTexel.x - 0.5f, -(corner.Y * invTexel.y - 0.5f), 0f);

            var flow = velocities[i];
            // NOTE(review): the velocity keeps image-space Y sign (not flipped
            // like `origin`) — preserved as-is; confirm intent with the caller.
            var delta = new Vector3(flow.X * invTexel.x, flow.Y * invTexel.y, 0f);

            // Encode flow direction as hue: angle normalized to [0, 1).
            var angle = Mathf.Atan2(delta.y, delta.x);
            if (angle < 0)
                angle += 2 * Mathf.PI;
            var color = HSBColor.ToColor(new HSBColor(angle * R_TWO_PI, 1f, 1f));

            // Drop segments whose velocity exceeds the limit (keep the point).
            if (sqrLimit < delta.sqrMagnitude)
                delta = Vector3.zero;

            vertices[baseIndex] = origin;
            vertices[baseIndex + 1] = origin + delta;
            colors[baseIndex] = color;
            colors[baseIndex + 1] = color;
            indices[baseIndex] = baseIndex;
            indices[baseIndex + 1] = baseIndex + 1;
        }

        mesh.vertices = vertices;
        mesh.colors = colors;
        mesh.SetIndices(indices, MeshTopology.Lines, 0);
        mesh.RecalculateBounds();
    }
Exemplo n.º 2
0
        public Perspective()
        {
            // cvGetPerspectiveTransform + cvWarpPerspective sample: map one
            // quadrilateral in the source image onto another quadrilateral.
            using (var srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (var dstImg = srcImg.Clone())
            {
                // Corresponding quad corners before and after the transform.
                var srcPnt = new CvPoint2D32f[]
                {
                    new CvPoint2D32f(150.0f, 150.0f),
                    new CvPoint2D32f(150.0f, 300.0f),
                    new CvPoint2D32f(350.0f, 300.0f),
                    new CvPoint2D32f(350.0f, 150.0f),
                };
                var dstPnt = new CvPoint2D32f[]
                {
                    new CvPoint2D32f(200.0f, 200.0f),
                    new CvPoint2D32f(150.0f, 300.0f),
                    new CvPoint2D32f(350.0f, 300.0f),
                    new CvPoint2D32f(300.0f, 200.0f),
                };
                using (CvMat mapMatrix = Cv.GetPerspectiveTransform(srcPnt, dstPnt))
                {
                    // Warp the whole image; outliers are filled with gray (100).
                    Cv.WarpPerspective(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(100));

                    // Show input and output side by side until a key is pressed.
                    using (new CvWindow("src", srcImg))
                    using (new CvWindow("dst", dstImg))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Exemplo n.º 3
0
        public Perspective()
        {
            // cvGetPerspectiveTransform + cvWarpPerspective
            // Compute a perspective transform matrix from 4 corresponding point
            // pairs on the image, then apply it to the whole image.

            // (1) Load the image and allocate the output image.
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            {
                // (2) Set the corresponding quad corners before and after the
                //     transform, and compute the perspective matrix from them.
                CvPoint2D32f[] srcPnt = new CvPoint2D32f[4];
                CvPoint2D32f[] dstPnt = new CvPoint2D32f[4];
                srcPnt[0] = new CvPoint2D32f(150.0f, 150.0f);
                srcPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
                srcPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
                srcPnt[3] = new CvPoint2D32f(350.0f, 150.0f);
                dstPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
                dstPnt[1] = new CvPoint2D32f(150.0f, 300.0f);
                dstPnt[2] = new CvPoint2D32f(350.0f, 300.0f);
                dstPnt[3] = new CvPoint2D32f(300.0f, 200.0f);
                using (CvMat mapMatrix = Cv.GetPerspectiveTransform(srcPnt, dstPnt))
                {
                    // (3) Warp the whole image with the computed matrix;
                    //     pixels outside the source are filled with gray (100).
                    Cv.WarpPerspective(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(100));
                    // (4) Display the result until a key is pressed.
                    using (new CvWindow("src", srcImg))
                    using (new CvWindow("dst", dstImg))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Exemplo n.º 4
0
        public Affine()
        {
            // cvGetAffineTransform + cvWarpAffine
            // Compute an affine transform matrix from 3 corresponding point
            // pairs on the image, then apply it to the whole image.

            // (1) Load the image and allocate the output image.
            using (IplImage srcImg = new IplImage(Const.ImageGoryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            {

                // (2) Set the corresponding triangle vertices before and after
                //     the transform and compute the affine matrix with
                //     cvGetAffineTransform.
                CvPoint2D32f[] srcPnt = new CvPoint2D32f[3];
                CvPoint2D32f[] dstPnt = new CvPoint2D32f[3];
                srcPnt[0] = new CvPoint2D32f(200.0f, 200.0f);
                srcPnt[1] = new CvPoint2D32f(250.0f, 200.0f);
                srcPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
                dstPnt[0] = new CvPoint2D32f(300.0f, 100.0f);
                dstPnt[1] = new CvPoint2D32f(300.0f, 50.0f);
                dstPnt[2] = new CvPoint2D32f(200.0f, 100.0f);
                using (CvMat mapMatrix = Cv.GetAffineTransform(srcPnt, dstPnt))
                {
                    // (3) Transform the image with cvWarpAffine using the
                    //     computed affine matrix; outliers filled with black.
                    Cv.WarpAffine(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(0));
                    // (4) Display the result until a key is pressed.
                    using (new CvWindow("src", srcImg)) 
                    using (new CvWindow("dst", dstImg))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Exemplo n.º 5
0
        public Affine()
        {
            // cvGetAffineTransform + cvWarpAffine sample: compute an affine
            // matrix from three point correspondences and warp the image.
            using (var srcImg = new IplImage(FilePath.Image.Goryokaku, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (var dstImg = srcImg.Clone())
            {
                // Triangle vertices before and after the transform.
                var srcPnt = new CvPoint2D32f[]
                {
                    new CvPoint2D32f(200.0f, 200.0f),
                    new CvPoint2D32f(250.0f, 200.0f),
                    new CvPoint2D32f(200.0f, 100.0f),
                };
                var dstPnt = new CvPoint2D32f[]
                {
                    new CvPoint2D32f(300.0f, 100.0f),
                    new CvPoint2D32f(300.0f, 50.0f),
                    new CvPoint2D32f(200.0f, 100.0f),
                };
                using (CvMat mapMatrix = Cv.GetAffineTransform(srcPnt, dstPnt))
                {
                    // Warp the whole image; outliers are filled with black.
                    Cv.WarpAffine(srcImg, dstImg, mapMatrix, Interpolation.Linear | Interpolation.FillOutliers, CvScalar.ScalarAll(0));

                    // Show input and output until a key is pressed.
                    using (new CvWindow("src", srcImg))
                    using (new CvWindow("dst", dstImg))
                    {
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Exemplo n.º 6
0
        public PixelSampling()
        {
            // Pixel sampling for translation: cvGetRectSubPix

            // (1) Load the image and allocate the output image.
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            {
                // (2) Choose the position `center` in src_img that will become
                //     the center of dst_img (here: the bottom-right pixel).
                CvPoint2D32f center = new CvPoint2D32f
                {
                    X = srcImg.Width - 1,
                    Y = srcImg.Height - 1
                };
                // (3) Shift the whole image with GetRectSubPix so that `center`
                //     becomes the image center.
                Cv.GetRectSubPix(srcImg, dstImg, center);
                // (4) Display the result until a key is pressed.
                using (CvWindow wSrc = new CvWindow("src"))
                using (CvWindow wDst = new CvWindow("dst"))
                {
                    wSrc.Image = srcImg;
                    wDst.Image = dstImg;
                    Cv.WaitKey(0);
                }
            }


            // Pixel sampling for rotation: cvGetQuadrangleSubPix

            const int angle = 45;
            // (1) Load the image and allocate the output image.
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (IplImage dstImg = srcImg.Clone())
            {
                // (2) Fill the elements of a 2x3 rotation (affine) matrix and
                //     initialize the CvMat matrix with them.
                float[] m = new float[6];
                m[0] = (float)(Math.Cos(angle * Cv.PI / 180.0));
                m[1] = (float)(-Math.Sin(angle * Cv.PI / 180.0));
                m[2] = srcImg.Width * 0.5f;   // translate so the source center maps into view
                m[3] = -m[1];
                m[4] = m[0];
                m[5] = srcImg.Height * 0.5f;
                using (CvMat mat = new CvMat(2, 3, MatrixType.F32C1, m))
                {
                    // (3) Rotate the whole image with GetQuadrangleSubPix using
                    //     the rotation matrix above.
                    Cv.GetQuadrangleSubPix(srcImg, dstImg, mat);
                    // (4) Display the result until a key is pressed.
                    using (CvWindow wSrc = new CvWindow("src"))
                    using (CvWindow wDst = new CvWindow("dst"))
                    {
                        wSrc.Image = srcImg;
                        wDst.Image = dstImg;
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Exemplo n.º 7
0
 private void GetEnclosingCircle(
     IEnumerable<CvPoint> points, out CvPoint2D32f center, out float radius)
 {
     // Wrap the points in a single-column S32C2 matrix and delegate the
     // minimal-enclosing-circle computation to cvMinEnclosingCircle.
     CvPoint[] buffer = points.ToArray();
     using (var pointMatrix = new CvMat(buffer.Length, 1, MatrixType.S32C2, buffer))
     {
         Cv.MinEnclosingCircle(pointMatrix, out center, out radius);
     }
 }
        public AsyncResult CalculateOpticalFlow(CvPoint2D32f[] corners0)
        {
            // Snapshot timing state (advance the prev/current time pair) and
            // the input corners, then hand the optical-flow computation to a
            // thread-pool worker. The returned handle is filled asynchronously.
            var result = new AsyncResult();
            result.prevTime = _prevTime = _currTime;
            result.currTime = _currTime = Time.time;
            result.corners0 = corners0;
            result.nCorners = corners0.Length;

            ThreadPool.QueueUserWorkItem(_CalculateOpticalFlow, result);
            return result;
        }
Exemplo n.º 9
0
        public sMarkerInfo()
        {
            // Reset marker dimensions and ID, and initialize all four corner
            // slots to the origin.
            width = 0.0;
            height = 0.0;
            ID = -1;

            for (int index = 0; index < 4; index++)
                corner[index] = new CvPoint2D32f(0, 0);
        }
Exemplo n.º 10
0
        public static CvCircleSegment Approximate(CvPoint[] points)
        {
            // Promote the integer points to float points and reuse the
            // CvPoint2D32f overload for the actual circle fit.
            CvPoint2D32f[] floatPoints = new CvPoint2D32f[points.Length];
            for (int i = 0; i < floatPoints.Length; i++)
            {
                floatPoints[i] = new CvPoint2D32f((float)points[i].X, (float)points[i].Y);
            }
            return Approximate(floatPoints);
        }
Exemplo n.º 11
0
        public PixelSampling()
        {
            // Pixel sampling for translation: cvGetRectSubPix.
            using (var srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (var dstImg = srcImg.Clone())
            {
                // Sample a src-sized rectangle centered on the bottom-right
                // pixel, effectively shifting the whole image.
                var center = new CvPoint2D32f
                {
                    X = srcImg.Width - 1,
                    Y = srcImg.Height - 1
                };

                Cv.GetRectSubPix(srcImg, dstImg, center);

                using (var wSrc = new CvWindow("src"))
                using (var wDst = new CvWindow("dst"))
                {
                    wSrc.Image = srcImg;
                    wDst.Image = dstImg;
                    Cv.WaitKey(0);
                }
            }


            // Pixel sampling for rotation: cvGetQuadrangleSubPix.
            const int angle = 45;

            using (var srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyDepth | LoadMode.AnyColor))
            using (var dstImg = srcImg.Clone())
            {
                // 2x3 rotation matrix by `angle` degrees, translated so the
                // source center maps into view.
                var cos = (float)Math.Cos(angle * Cv.PI / 180.0);
                var sin = (float)Math.Sin(angle * Cv.PI / 180.0);
                float[] m =
                {
                    cos, -sin, srcImg.Width * 0.5f,
                    sin,  cos, srcImg.Height * 0.5f,
                };
                using (var mat = new CvMat(2, 3, MatrixType.F32C1, m))
                {
                    Cv.GetQuadrangleSubPix(srcImg, dstImg, mat);

                    using (var wSrc = new CvWindow("src"))
                    using (var wDst = new CvWindow("dst"))
                    {
                        wSrc.Image = srcImg;
                        wDst.Image = dstImg;
                        Cv.WaitKey(0);
                    }
                }
            }
        }
Exemplo n.º 12
0
 private CvPoint2D32f[] GetRandomPoints(int count, CvSize imageSize)
 {
     // Generate `count` points that roughly follow a random line
     // y = slope * x + noise, with slope in [0.5, 1.5) and vertical noise
     // in [-50, 50).
     var rand = new Random();
     var slope = rand.NextDouble() + 0.5;
     var points = new CvPoint2D32f[count];
     for (var i = 0; i < points.Length; i++)
     {
         double x = rand.Next(imageSize.Width);
         double y = (x * slope) + (rand.Next(100) - 50);
         points[i] = new CvPoint2D32f(x, y);
     }
     return points;
 }
Exemplo n.º 13
0
    public static void CalculateFlowVelocities(OpticalFlowWorker.AsyncResult r, ref CvPoint2D32f[] velocities)
    {
        // (Re)allocate the output buffer when it is missing or mis-sized.
        if (velocities == null || velocities.Length != r.nCorners)
            velocities = new CvPoint2D32f[r.nCorners];

        // Velocity of each corner = tracked position minus original position.
        var before = r.corners0;
        var after = r.corners1;
        for (var i = 0; i < r.nCorners; i++)
            velocities[i] = after[i] - before[i];
    }
Exemplo n.º 14
0
 public static OpenCvSharp.CvPoint2D32f[] GenGridCorners(int width, int height, float gridSize)
 {
     // Lay out one corner point at the center of each gridSize x gridSize
     // cell covering the width x height area, in row-major order.
     var cols = (int)(width / gridSize);
     var rows = (int)(height / gridSize);
     var half = gridSize * 0.5f;
     var corners = new CvPoint2D32f[cols * rows];
     var i = 0;
     for (var row = 0; row < rows; row++)
     {
         for (var col = 0; col < cols; col++)
         {
             corners[i++] = new CvPoint2D32f(half + col * gridSize, half + row * gridSize);
         }
     }
     return corners;
 }
Exemplo n.º 15
0
        public Delaunay()
        {
            // Incrementally build a Delaunay triangulation from random points,
            // animating each insertion, then paint the final Voronoi diagram.
            var rect = new CvRect(0, 0, 600, 600);
            var activeFacetColor = new CvColor(255, 0, 0);
            var delaunayColor = new CvColor(0, 0, 0);
            var voronoiColor = new CvColor(0, 180, 0);
            var backgroundColor = new CvColor(255, 255, 255);
            var rand = new Random();

            using (var storage = new CvMemStorage(0))
            using (var img = new IplImage(rect.Size, BitDepth.U8, 3))
            using (var window = new CvWindow("delaunay"))
            {
                img.Set(backgroundColor);
                var subdiv = new CvSubdiv2D(rect, storage);
                for (var i = 0; i < 200; i++)
                {
                    // Random point kept away from the rectangle border.
                    var fp = new CvPoint2D32f
                    {
                        X = (float)rand.Next(5, rect.Width - 10),
                        Y = (float)rand.Next(5, rect.Height - 10)
                    };

                    // Highlight the facet that contains the new point.
                    LocatePoint(subdiv, fp, img, activeFacetColor);
                    window.Image = img;
                    if (CvWindow.WaitKey(100) >= 0)
                    {
                        break;
                    }

                    // Insert the point and redraw the updated diagrams.
                    subdiv.Insert(fp);
                    subdiv.CalcVoronoi2D();
                    img.Set(backgroundColor);
                    DrawSubdiv(img, subdiv, delaunayColor, voronoiColor);
                    window.Image = img;
                    if (CvWindow.WaitKey(100) >= 0)
                    {
                        break;
                    }
                }

                // Final frame: filled Voronoi cells.
                img.Set(backgroundColor);
                PaintVoronoi(subdiv, img);
                window.Image = img;

                CvWindow.WaitKey(0);
            }
        }
Exemplo n.º 16
0
        /// <summary>
        /// FLANN demo: builds a random 2-D data set, then runs a 1-NN KD-tree
        /// search for a fixed query point and prints the match.
        /// </summary>
        public FlannTest()
        {
            Console.WriteLine("===== FlannTest =====");

            // Create a data set of 10000 random points in [0, 10000)^2.
            using (Mat features = new Mat(10000, 2, MatrixType.F32C1))
            {
                Random rand = new Random();
                for (int i = 0; i < features.Rows; i++)
                {
                    features.Set<float>(i, 0, rand.Next(10000));
                    features.Set<float>(i, 1, rand.Next(10000));
                }

                // Query point.
                // Fix: `queries` wraps unmanaged memory and was never disposed;
                // scope it with `using` like `features`.
                CvPoint2D32f queryPoint = new CvPoint2D32f(7777, 7777);
                using (Mat queries = new Mat(1, 2, MatrixType.F32C1))
                {
                    queries.Set<float>(0, 0, queryPoint.X);
                    queries.Set<float>(0, 1, queryPoint.Y);
                    Console.WriteLine("query:({0}, {1})", queryPoint.X, queryPoint.Y);
                    Console.WriteLine("-----");

                    // k-nearest-neighbor search on a 4-tree KD-tree index.
                    using (Index nnIndex = new Index(features, new KDTreeIndexParams(4)))
                    {
                        int knn = 1;
                        int[] indices;
                        float[] dists;
                        nnIndex.KnnSearch(queries, out indices, out dists, knn, new SearchParams(32));

                        for (int i = 0; i < knn; i++)
                        {
                            int index = indices[i];
                            float dist = dists[i];
                            CvPoint2D32f pt = new CvPoint2D32f(features.Get<float>(index, 0), features.Get<float>(index, 1));
                            Console.Write("No.{0}\t", i);
                            Console.Write("index:{0}", index);
                            Console.Write(" distance:{0}", dist);
                            Console.Write(" data:({0}, {1})", pt.X, pt.Y);
                            Console.WriteLine();
                        }
                        Console.Read();
                    }
                }
            }
        }
Exemplo n.º 17
0
        /// <summary>
        /// Estimates the rotation angle of a box from its corner points: finds
        /// the topmost corner, takes the longer of its two incident edges, and
        /// measures that edge's angle against the horizontal.
        /// </summary>
        /// <param name="pt">Box corners (at least 4 required).</param>
        /// <returns>Signed angle from GetAngle, or NaN for fewer than 4 points.</returns>
        public static double BoxAngle(CvPoint2D32f[] pt)
        {
            if (pt.Length < 4) return Double.NaN;

            // Find the topmost point (smallest Y; ties resolve to the last
            // occurrence, matching the original <= comparison).
            double minY = double.MaxValue;
            int ind = -1;
            for (int i = 0; i < pt.Length; i++)
            {
                if (pt[i].Y <= minY)
                {
                    minY = pt[i].Y;
                    ind = i;
                }
            }

            // Neighbors of the topmost point along the polygon outline.
            // Fix: the original hard-coded the wrap indices as 0 and 3, which
            // threw IndexOutOfRangeException for arrays longer than 4 (and read
            // a non-neighbor for ind == 3); wrap with modular arithmetic
            // instead. Behavior is identical for exactly 4 points.
            int n = pt.Length;
            CvPoint p = pt[ind];                    // current (topmost) point
            CvPoint p_prev = pt[(ind - 1 + n) % n]; // previous point
            CvPoint p_next = pt[(ind + 1) % n];     // next point

            // Pick the longer of the two edges incident to the topmost point.
            double a = CvPoint.Distance(p, p_prev);
            double b = CvPoint.Distance(p, p_next);
            CvPoint p_angle = (a > b) ? p_prev : p_next;

            // Angle against the horizontal through p; sign depends on which
            // side of p the long edge falls.
            if (p_angle.X > p.X)
            {
                return -GetAngle(p_angle, p, new CvPoint(p_angle.X, p.Y));
            }
            return GetAngle(p_angle, p, new CvPoint(p_angle.X, p.Y));
        }
Exemplo n.º 18
0
        /// <summary>
        /// Highlights the facet of the subdivision containing the given point
        /// by drawing every edge around it, then draws the point itself.
        /// </summary>
        /// <param name="subdiv">Planar subdivision to search.</param>
        /// <param name="fp">Query point.</param>
        /// <param name="img">Image to draw onto.</param>
        /// <param name="active_color">Color for the highlighted edges and point.</param>
        private void LocatePoint(CvSubdiv2D subdiv, CvPoint2D32f fp, IplImage img, CvScalar active_color)
        {
            CvSubdiv2DEdge e;
            CvSubdiv2DEdge e0 = 0;

            subdiv.Locate(fp, out e0);

            if (e0 != 0)
            {
                // Walk the edge ring bounding the located facet until we loop
                // back to the starting edge.
                e = e0;
                do
                {
                    DrawSubdivEdge(img, e, active_color);
                    e = e.GetEdge(CvNextEdgeType.NextAroundLeft);
                }while (e != e0);
            }

            DrawSubdivPoint(img, fp, active_color);
        }
Exemplo n.º 19
0
 public BloodObjects(int Id,
                     CvSeq <CvPoint> Contour,
                     List <CvPoint?> Path,
                     double Area,
                     CvPoint2D32f Center,
                     double Distance,
                     double Compact,
                     double Perimeter,
                     Group group)
 {
     // Plain data-carrying constructor: copy each measured property of a
     // detected blood object into the instance.
     this.Id = Id;
     this.Contour = Contour;
     this.ContourPath = Path;
     this.Area = Area;
     this.Center = Center;
     this.Distance = Distance;
     this.Compact = Compact;
     this.Perimeter = Perimeter;
     Group = group;
 }
Exemplo n.º 20
0
        /// <summary>
        /// Highlights the facet of the subdivision containing the given point
        /// by drawing every edge around it, then draws the point itself.
        /// </summary>
        /// <param name="subdiv">Planar subdivision to search.</param>
        /// <param name="fp">Query point.</param>
        /// <param name="img">Image to draw onto.</param>
        /// <param name="active_color">Color for the highlighted edges and point.</param>
        private void LocatePoint(CvSubdiv2D subdiv, CvPoint2D32f fp, IplImage img, CvScalar active_color)
        {
            CvSubdiv2DEdge e;
            CvSubdiv2DEdge e0 = 0;

            subdiv.Locate(fp, out e0);

            if (e0 != 0)
            {
                // Walk the edge ring bounding the located facet until we loop
                // back to the starting edge.
                e = e0;
                do
                {
                    DrawSubdivEdge(img, e, active_color);
                    e = e.GetEdge(CvNextEdgeType.NextAroundLeft);                    
                }
                while (e != e0);
            }

            DrawSubdivPoint(img, fp, active_color);
        }
Exemplo n.º 21
0
    /// <summary>
    /// Runs chessboard-corner detection over both halves of the camera image
    /// and returns the detected corner positions in pixel coordinates.
    /// </summary>
    /// <returns>Corner pixel points gathered from the two half-image searches.</returns>
    public List <Vector2> FindChessboardsInImage()
    {
        List <Vector2> cornerPixelPoints = new List <Vector2>();

        // Inner-corner grid of the chessboard pattern (squares - 1 per side).
        CvSize chessboardPatternSize = new CvSize();
        chessboardPatternSize.width = ScanningCamera.kChessboardWidth - 1;
        chessboardPatternSize.height = ScanningCamera.kChessboardHeight - 1;

        // Sub-pixel refinement window; a (-1, -1) zero zone disables the
        // excluded dead region.
        CvSize searchWindow = new CvSize();
        searchWindow.width = 15;
        searchWindow.height = 15;

        CvSize zeroZone = new CvSize();
        zeroZone.width = -1;
        zeroZone.height = -1;
        CvTermCriteria aCrit = new CvTermCriteria(3, 100, 0);

        CvPoint2D32f[] cornerArray = new CvPoint2D32f[(ScanningCamera.kChessboardHeight - 1) * (ScanningCamera.kChessboardWidth - 1)];

        // Pin the managed corner array so native code can write through the
        // raw pointer. Fix: release the handle in `finally` so the pin is
        // undone even if FindCornerPixels throws (the original leaked the
        // pinned handle on any exception).
        GCHandle cornerArrayHandle = GCHandle.Alloc(cornerArray, GCHandleType.Pinned);
        try
        {
            System.IntPtr cornerPointer = cornerArrayHandle.AddrOfPinnedObject();

            Matrix grayCvImage   = new Matrix(imageHeight, imageWidth, MatrixType.cv8UC1);
            Matrix grayHalfScale = new Matrix(imageHeight / kSearchImageScaleFactor,
                                              imageWidth / kSearchImageScaleFactor, MatrixType.cv8UC1);

            // Search each half of the image for the chessboard pattern.
            FindCornerPixels(imageWidth / 2 + 1, imageWidth, 0, imageWidth / 2,
                             grayCvImage, grayHalfScale, chessboardPatternSize,
                             cornerPointer, cornerArray, searchWindow, zeroZone, aCrit, cornerPixelPoints, 0);

            FindCornerPixels(0, imageWidth / 2, imageWidth / 2 + 1, imageWidth,
                             grayCvImage, grayHalfScale, chessboardPatternSize,
                             cornerPointer, cornerArray, searchWindow, zeroZone, aCrit, cornerPixelPoints, 1);
        }
        finally
        {
            cornerArrayHandle.Free();
        }

        return(cornerPixelPoints);
    }
Exemplo n.º 22
0
        /// <summary>
        /// Applies a fixed affine warp to the source image: the (100, 100)
        /// anchor is moved to (300, 100) while the other two anchors stay put.
        /// </summary>
        /// <param name="src">Source image; anchor points are size-relative.</param>
        /// <returns>The warped image (also stored in the `affine` field).</returns>
        public IplImage AffineImage(IplImage src)
        {
            affine = new IplImage(src.Size, BitDepth.U8, 3);

            // Corresponding triangle vertices before and after the transform.
            CvPoint2D32f[] srcPoint = new CvPoint2D32f[3];
            CvPoint2D32f[] dstPoint = new CvPoint2D32f[3];

            srcPoint[0] = new CvPoint2D32f(100.0, 100.0);
            srcPoint[1] = new CvPoint2D32f(src.Width - 100.0, 100.0);
            srcPoint[2] = new CvPoint2D32f(100.0, src.Height - 100.0);

            dstPoint[0] = new CvPoint2D32f(300.0, 100.0);
            dstPoint[1] = new CvPoint2D32f(src.Width - 100.0, 100.0);
            dstPoint[2] = new CvPoint2D32f(100.0, src.Height - 100.0);

            // Fix: the transform matrix wraps unmanaged memory and was never
            // disposed; scope it with `using`.
            using (CvMat matrix = Cv.GetAffineTransform(srcPoint, dstPoint))
            {
                Console.WriteLine(matrix);  // debug dump of the 2x3 matrix

                Cv.WarpAffine(src, affine, matrix, Interpolation.Linear, CvScalar.ScalarAll(0));
            }

            return(affine);
        }
Exemplo n.º 23
0
        /// <summary>
        /// Аппроксимирует набор точек окружностью
        /// http://forum.sources.ru/index.php?showtopic=354082&view=findpost&p=3107176
        /// </summary>
        /// <param name="points">Аппроксимируемые точки</param>
        /// <returns>Получившаяся окружность</returns>
        /// <summary>
        /// Fits a circle to a point set by linear least squares
        /// (http://forum.sources.ru/index.php?showtopic=354082&view=findpost&p=3107176):
        /// solves x^2 + y^2 + b0*x + b1*y + b2 = 0 for (b0, b1, b2) via SVD,
        /// then recovers center = (-b0/2, -b1/2) and
        /// radius = sqrt(cx^2 + cy^2 - b2).
        /// </summary>
        /// <param name="points">Points to approximate.</param>
        /// <returns>The fitted circle.</returns>
        public static CvCircleSegment Approximate(CvPoint2D32f[] points)
        {
            CvCircleSegment result = new CvCircleSegment();

            // Fix: the temporary matrices wrap unmanaged buffers and were
            // never disposed; scope them with `using`.
            using (CvMat y = new CvMat(points.Length, 1, MatrixType.F32C1))
            using (CvMat X = new CvMat(points.Length, 3, MatrixType.F32C1))
            using (CvMat b = new CvMat(3, 1, MatrixType.F32C1))
            {
                // Build the linear system rows: [x, y, 1] * b = -(x^2 + y^2).
                for (int i = 0; i < points.Length; i++)
                {
                    y[i, 0] = -1 * (points[i].X * points[i].X + points[i].Y * points[i].Y);
                    X[i, 0] = points[i].X;
                    X[i, 1] = points[i].Y;
                    X[i, 2] = 1;
                }

                Cv.Solve(X, y, b, InvertMethod.Svd);

                result.Center.X = (float)(b[0, 0] / -2.0);
                result.Center.Y = (float)(b[1, 0] / -2.0);
                result.Radius = (float)Math.Sqrt(result.Center.X * result.Center.X + result.Center.Y * result.Center.Y - b[2, 0]);
            }

            return result;
        }
Exemplo n.º 24
0
        // find and return the 4 corner markers positions
        private static CvPoint2D32f[] GetPoints(IplImage src, CvWindow win)
        {
            IplImage gray = new IplImage(src.Size, BitDepth.U8, 1);

            Cv.CvtColor(src, gray, ColorConversion.RgbToGray);

            Mat m = new Mat(gray);

            m.GaussianBlur(new Size(9, 9), 2, 2);
            InputArray ia = InputArray.Create(m);

            CvCircleSegment[] circles = Cv2.HoughCircles(ia, HoughCirclesMethod.Gradient, calDP, calMinDist, calP1, calP2, calMinRadius, calMaxRadius);

            foreach (CvCircleSegment item in circles)
            {
                Cv.DrawCircle(gray, item.Center, 32, CvColor.Green);
            }

            win.Image = gray;

            if (circles.Length > 3)
            {
                Cv.DrawCircle(gray, circles[0].Center, 64, CvColor.Green);

                CvPoint2D32f[] pts = new CvPoint2D32f[4];
                for (int i = 0; i < 4; i++)
                {
                    Console.WriteLine("Point " + i + " | radius = " + circles[i].Radius);
                    pts[i] = new CvPoint2D32f(circles[i].Center.X, circles[i].Center.Y);
                }

                return(SortPoints(pts));
            }

            return(null);
        }
Exemplo n.º 25
0
        /// <summary>
        /// Converts point coordinates on the image into real-world (plane)
        /// coordinates: removes lens distortion, then applies the perspective
        /// transformation matrix.
        /// </summary>
        /// <param name="imagePoints">Points in image coordinates.</param>
        /// <returns>Corresponding points in real-world coordinates.</returns>
        public CvPoint2D32f[] GetRealPoints(CvPoint2D32f[] imagePoints)
        {
            CvPoint2D32f[] realPoints = new CvPoint2D32f[imagePoints.Length];

            // Fix: dispose the temporary matrices (they wrap unmanaged memory).
            using (CvMat imagePointsMat = new CvMat(imagePoints.Length, 1, MatrixType.F32C2, imagePoints))
            using (CvMat undistortedPointsMat = new CvMat(imagePoints.Length, 1, MatrixType.F32C2))
            using (CvMat realPointsMat = new CvMat(imagePoints.Length, 1, MatrixType.F32C2))
            {
                // Remove lens distortion.
                Cv.Undistort2(imagePointsMat, undistortedPointsMat, Intrinsic, Distortion);

                // Apply the transition matrix. Fix: the original transformed the
                // raw imagePointsMat, silently discarding the undistortion
                // result it had just computed; feed the undistorted points
                // through instead.
                Cv.PerspectiveTransform(undistortedPointsMat, realPointsMat, TransformationMatrix);

                // Convert the transformed coordinates into the output array.
                for (int i = 0; i < imagePoints.Length; i++)
                {
                    realPoints[i].X = (float)(realPointsMat[i].Val0);
                    realPoints[i].Y = (float)(realPointsMat[i].Val1);
                }
            }

            return(realPoints);
        }
Exemplo n.º 26
0
        unsafe public List <float[]> MakeDiscriptor(Bitmap ibmp, List <CvSURFPoint> keypoints)
        {
            Rectangle rect = new Rectangle(0, 0, ibmp.Width, ibmp.Height);

            System.Drawing.Imaging.BitmapData data = ibmp.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite,
                                                                   ibmp.PixelFormat);
            CvMemStorage storage = Cv.CreateMemStorage();

            //const int descriptor_data_type = CV_32F;

            int descriptor_size = extended ? 128 : 64;


            //CvSeq descriptors = Cv.CreateSeq(SeqType.Zero, CvSeq.SizeOf, descriptor_size * CvSeq<float>.SizeOf, storage);
            List <float[]> descriptors = new List <float[]>();


            //descriptors = (CvSeq)

            //float [] fZero = new float [1];
            //fZero[0] = 0;

            //Cv.SeqPushMulti<float>(descriptors,fZero, InsertPosition.Back);

            //descriptors = cvCreateSeq( 0, sizeof(CvSeq),
            //    descriptor_size*CV_ELEM_SIZE(descriptor_data_type), storage );
            //cvSeqPushMulti( descriptors, 0, N );


            const int   ORI_RADIUS = 6;
            const float ORI_SIGMA  = 2.5f;

            float[] G = getGaussianKernel(2 * ORI_RADIUS + 1, ORI_SIGMA);

            const int max_ori_samples = (2 * ORI_RADIUS + 1) * (2 * ORI_RADIUS + 1);

            CvPoint[] apt   = new CvPoint[max_ori_samples];
            float[]   apt_w = new float[max_ori_samples];

            int nangle0 = 0;

            for (int i = -ORI_RADIUS; i <= ORI_RADIUS; i++)
            {
                for (int j = -ORI_RADIUS; j <= ORI_RADIUS; j++)
                {
                    if (i * i + j * j <= ORI_RADIUS * ORI_RADIUS)
                    {
                        apt[nangle0]     = new CvPoint(j, i);
                        apt_w[nangle0++] = G[i + ORI_RADIUS] * G[j + ORI_RADIUS];
                    }
                }
            }

            const int PATCH_SZ = 20;

            float[,] DW = new float[PATCH_SZ, PATCH_SZ];

            /* Standard deviation of the Gaussian used to weight the gradient samples
             * used to build a keypoint descriptor */

            const float DESC_SIGMA = 3.3f;

            CvMat _DW = new CvMat(PATCH_SZ, PATCH_SZ, MatrixType.F32C1, DW);

            /* Gaussian used to weight descriptor samples */
            {
                double c2 = 1 / (DESC_SIGMA * DESC_SIGMA * 2);
                double gs = 0;
                for (int i = 0; i < PATCH_SZ; i++)
                {
                    for (int j = 0; j < PATCH_SZ; j++)
                    {
                        double x = j - (float)(PATCH_SZ - 1) / 2, y = i - (float)(PATCH_SZ - 1) / 2;
                        double val = Math.Exp(-(x * x + y * y) * c2);
                        DW[i, j] = (float)val;
                        gs      += val;
                    }
                }
                Cv.Scale(_DW, _DW, 1 / gs);
            }

            const int NX = 2, NY = 2;

            int [,] dx_s = { { 0, 0, 2, 4, -1 }, { 2, 0, 4, 4, 1 } };
            int [,] dy_s = { { 0, 0, 4, 2, 1 }, { 0, 2, 4, 4, -1 } };


            /* Increment used for the orientation sliding window (in degrees) */
            const int ORI_SEARCH_INC = 5;

            /* The size of the sliding window (in degrees) used to assign an
             * orientation */
            const int ORI_WIN = 60;


            CvMat win_bufs = null;


            fixed(int *imgPtr = integralMap)
            {
                byte *img = (byte *)(data.Scan0);


                for (int k = 0; k < keypoints.Count; k++)
                {
                    //const int* sum_ptr = sum->data.i;
                    int      sum_cols = _Width;
                    int      i, j, kk, x, y, nangle;
                    float [] X     = new float [max_ori_samples];
                    float [] Y     = new float [max_ori_samples];
                    float [] angle = new float [max_ori_samples];

                    //uchar PATCH[PATCH_SZ+1][PATCH_SZ+1];
                    byte [,] PATCH = new byte [PATCH_SZ + 1, PATCH_SZ + 1];

                    float [,] DX = new float [PATCH_SZ, PATCH_SZ];
                    float [,] DY = new float [PATCH_SZ, PATCH_SZ];

                    CvMat _X     = new CvMat(1, max_ori_samples, MatrixType.F32C1, X);
                    CvMat _Y     = new CvMat(1, max_ori_samples, MatrixType.F32C1, Y);
                    CvMat _angle = new CvMat(1, max_ori_samples, MatrixType.F32C1, angle);

                    CvMat _patch = new CvMat(PATCH_SZ + 1, PATCH_SZ + 1, MatrixType.U8C1, PATCH);

                    //float* vec;

                    CvSurfHF [] dx_t = new CvSurfHF [NX];
                    CvSurfHF [] dy_t = new CvSurfHF [NY];

                    //int thread_idx = cvGetThreadNum();

                    CvSURFPoint kp = keypoints[k];//(CvSURFPoint)Cv.GetSeqElem<CvSURFPoint>( keypoints, k );

                    int size = kp.Size;

                    CvPoint2D32f center = kp.Pt;

                    /* The sampling intervals and wavelet sized for selecting an orientation
                     * and building the keypoint descriptor are defined relative to 's' */
                    float s = (float)size * 1.2f / 9.0f;

                    /* To find the dominant orientation, the gradients in x and y are
                     * sampled in a circle of radius 6s using wavelets of size 4s.
                     * We ensure the gradient wavelet size is even to ensure the
                     * wavelet pattern is balanced and symmetric around its center */
                    int grad_wav_size = (int)(2 * Math.Round(2 * s));

                    if (_Height < grad_wav_size || _Width < grad_wav_size)
                    {
                        /* when grad_wav_size is too big,
                         * the sampling of gradient will be meaningless
                         * mark keypoint for deletion. */
                        kp.Size = -1;

                        continue;
                    }

                    icvResizeHaarPattern(dx_s, dx_t, NX, 4, grad_wav_size, _Width);
                    icvResizeHaarPattern(dy_s, dy_t, NY, 4, grad_wav_size, _Width);

                    for (kk = 0, nangle = 0; kk < nangle0; kk++)
                    {
                        int * ptr;
                        float vx, vy;
                        x = (int)Math.Round(center.X + apt[kk].X * s - (float)(grad_wav_size - 1) / 2);
                        y = (int)Math.Round(center.Y + apt[kk].Y * s - (float)(grad_wav_size - 1) / 2);

                        if (y >= (_Height - grad_wav_size) ||
                            x >= (_Width - grad_wav_size))
                        {
                            continue;
                        }

                        ptr       = imgPtr + x + y * sum_cols;
                        vx        = icvCalcHaarPattern(ptr, dx_t, 2);
                        vy        = icvCalcHaarPattern(ptr, dy_t, 2);
                        X[nangle] = vx * apt_w[kk]; Y[nangle] = vy * apt_w[kk];
                        nangle++;
                    }
                    if (nangle == 0)
                    {
                        /* No gradient could be sampled because the keypoint is too
                         * near too one or more of the sides of the image. As we
                         * therefore cannot find a dominant direction, we skip this
                         * keypoint and mark it for later deletion from the sequence. */
                        kp.Size = -1;
                        continue;
                    }

                    _X.Cols = _Y.Cols = _angle.Cols = nangle;


                    //cvCartToPolar( &_X, &_Y, 0, &_angle, 1 );
                    Cv.CartToPolar(_X, _Y, null, _angle, AngleUnit.Degrees);

                    float bestx = 0, besty = 0, descriptor_mod = 0;


                    for (i = 0; i < 360; i += ORI_SEARCH_INC)
                    {
                        float sumx = 0, sumy = 0, temp_mod;
                        for (j = 0; j < nangle; j++)
                        {
                            int d = (int)Math.Abs(Math.Round(angle[j]) - i);

                            if (d < ORI_WIN / 2 || d > 360 - ORI_WIN / 2)
                            {
                                sumx += X[j];
                                sumy += Y[j];
                            }
                        }
                        temp_mod = sumx * sumx + sumy * sumy;
                        if (temp_mod > descriptor_mod)
                        {
                            descriptor_mod = temp_mod;
                            bestx          = sumx;
                            besty          = sumy;
                        }
                    }

                    float descriptor_dir = Cv.FastArctan(besty, bestx);


                    kp.Dir = descriptor_dir;

                    //if( !_descriptors )
                    //    continue;

                    descriptor_dir *= (float)(Math.PI / 180);

                    /* Extract a window of pixels around the keypoint of size 20s */
                    int win_size = (int)((PATCH_SZ + 1) * s);

                    //if( win_bufs[thread_idx] == 0 || win_bufs[thread_idx]->cols < win_size*win_size )
                    //{
                    //    cvReleaseMat( &win_bufs[thread_idx] );
                    //    win_bufs[thread_idx] = cvCreateMat( 1, win_size*win_size, CV_8U );
                    //}

                    // TODO: initialization still needs proper handling
                    if (win_bufs == null || win_bufs.Cols < win_size * win_size)
                    {
                        //cvReleaseMat( &win_bufs[thread_idx] );

                        win_bufs = new CvMat(1, win_size * win_size, MatrixType.U8C1);
                    }



                    CvMat win = new CvMat(win_size, win_size, MatrixType.U8C1, win_bufs.ToArray());

                    float sin_dir = (float)Math.Sin(descriptor_dir);
                    float cos_dir = (float)Math.Cos(descriptor_dir);

                    /* Subpixel interpolation version (slower). Subpixel not required since
                     * the pixels will all get averaged when we scale down to 20 pixels */
                    /*
                     * float w[] = { cos_dir, sin_dir, center.x,
                     *            -sin_dir, cos_dir , center.y };
                     * CvMat W = cvMat(2, 3, CV_32F, w);
                     * cvGetQuadrangleSubPix( img, &win, &W );
                     */

                    /* Nearest neighbour version (faster) */
                    float win_offset = -(float)(win_size - 1) / 2;
                    float start_x    = center.X + win_offset * cos_dir + win_offset * sin_dir;
                    float start_y    = center.Y - win_offset * sin_dir + win_offset * cos_dir;

                    byte *WIN = (byte *)win.Data;

                    for (i = 0; i < win_size; i++, start_x += sin_dir, start_y += cos_dir)
                    {
                        float pixel_x = start_x;
                        float pixel_y = start_y;
                        for (j = 0; j < win_size; j++, pixel_x += cos_dir, pixel_y -= sin_dir)
                        {
                            x = (int)Math.Round(pixel_x);
                            y = (int)Math.Round(pixel_y);

                            x = Math.Max(x, 0);
                            y = Math.Max(y, 0);
                            x = Math.Min(x, _Width - 1);
                            y = Math.Min(y, _Height - 1);

                            WIN[i * win_size + j] = img[y * data.Stride + x];
                        }
                    }

                    /* Scale the window to size PATCH_SZ so each pixel's size is s. This
                     * makes calculating the gradients with wavelets of size 2s easy */

                    //cvResize( &win, &_patch, CV_INTER_AREA );
                    Cv.Resize(win, _patch, Interpolation.Area);

                    /* Calculate gradients in x and y with wavelets of size 2s */
                    for (i = 0; i < PATCH_SZ; i++)
                    {
                        for (j = 0; j < PATCH_SZ; j++)
                        {
                            float dw = DW[i, j];
                            float vx = (PATCH[i, j + 1] - PATCH[i, j] + PATCH[i + 1, j + 1] - PATCH[i + 1, j]) * dw;
                            float vy = (PATCH[i + 1, j] - PATCH[i, j] + PATCH[i + 1, j + 1] - PATCH[i, j + 1]) * dw;
                            DX[i, j] = vx;
                            DY[i, j] = vy;
                        }
                    }

                    /* Construct the descriptor */
                    //vec = (float*)cvGetSeqElem( descriptors, k );
                    //float* vec =(float*) Cv.SeqPushMulti<float*>( descriptors, k );
                    //Cv.getse
                    float [] _vec = new float [descriptor_size];

                    fixed(float *pVec = _vec)
                    {
                        float *vec = pVec;

                        //for( kk = 0; kk < descriptor_size; kk++ )
                        //    vec[kk] = 0;

                        double square_mag = 0;

                        //if( params.extended )
                        if (true)
                        {
                            /* 128-bin descriptor */
                            for (i = 0; i < 4; i++)
                            {
                                for (j = 0; j < 4; j++)
                                {
                                    for (y = i * 5; y < i * 5 + 5; y++)
                                    {
                                        for (x = j * 5; x < j * 5 + 5; x++)
                                        {
                                            float tx = DX[y, x], ty = DY[y, x];
                                            if (ty >= 0)
                                            {
                                                vec[0] += tx;
                                                vec[1] += (float)Math.Abs(tx);
                                            }
                                            else
                                            {
                                                vec[2] += tx;
                                                vec[3] += (float)Math.Abs(tx);
                                            }
                                            if (tx >= 0)
                                            {
                                                vec[4] += ty;
                                                vec[5] += (float)Math.Abs(ty);
                                            }
                                            else
                                            {
                                                vec[6] += ty;
                                                vec[7] += (float)Math.Abs(ty);
                                            }
                                        }
                                    }
                                    for (kk = 0; kk < 8; kk++)
                                    {
                                        square_mag += vec[kk] * vec[kk];
                                    }
                                    vec += 8;
                                }
                            }
                        }
                        else
                        {
                            /* 64-bin descriptor */
                            for (i = 0; i < 4; i++)
                            {
                                for (j = 0; j < 4; j++)
                                {
                                    for (y = i * 5; y < i * 5 + 5; y++)
                                    {
                                        for (x = j * 5; x < j * 5 + 5; x++)
                                        {
                                            float tx = DX[y, x], ty = DY[y, x];
                                            vec[0] += tx; vec[1] += ty;
                                            vec[2] += (float)Math.Abs(tx); vec[3] += (float)Math.Abs(ty);
                                        }
                                    }
                                    for (kk = 0; kk < 4; kk++)
                                    {
                                        square_mag += vec[kk] * vec[kk];
                                    }
                                    vec += 4;
                                }
                            }
                        }


                        vec = pVec;

                        /* unit vector is essential for contrast invariance */
                        //vec = (float*)cvGetSeqElem( descriptors, k );


                        double scale = 1 / (Math.Sqrt(square_mag) + double.Epsilon);

                        for (kk = 0; kk < descriptor_size; kk++)
                        {
                            vec[kk] = (float)(vec[kk] * scale);
                        }
                    }
                }
            }

            return(descriptors);
        }
Exemplo n.º 27
0
 /// <summary>
 /// Draws a small filled marker at a Delaunay-subdivision point.
 /// </summary>
 /// <param name="img">Image to draw on.</param>
 /// <param name="fp">Point location (sub-pixel coordinates).</param>
 /// <param name="color">Marker color.</param>
 private void DrawSubdivPoint(IplImage img, CvPoint2D32f fp, CvColor color)
 {
     // Fixed 3-pixel radius, fully filled, anti-aliased edge.
     const int radius = 3;
     img.Circle(fp, radius, color, Cv.FILLED, LineType.AntiAlias, 0);
 }
Exemplo n.º 28
0
    // Convert _outBox.BoxPoints (type CvPoint2D32f) into CvPoint[][] for use
    // in DrawPolyLine
    CvPoint[][] rectangleBoxPoint(CvPoint2D32f[] _box)
    {
        // Collapse each floating-point corner to an integer CvPoint via the
        // type's built-in conversion. DrawPolyLine expects a jagged array of
        // polylines, so the single rectangle is wrapped in an outer array.
        CvPoint[] corners = new CvPoint[_box.Length];
        int index = 0;
        foreach (CvPoint2D32f boxPoint in _box)
        {
            corners[index++] = boxPoint;
        }
        return new CvPoint[][] { corners };
    }
Exemplo n.º 29
0
        /// <summary>
        /// 箱の頂点を見つける
        /// </summary>
        /// <param name="box">箱</param>
        /// <param name="pt">頂点の配列</param>
#else
        /// <summary>
        /// Finds box vertices
        /// </summary>
        /// <param name="box">Box</param>
        /// <param name="pt">Array of vertices</param>
#endif
        public static void BoxPoints(CvBox2D box, out CvPoint2D32f[] pt)
        {
            // Allocate the four-corner output buffer and let the native OpenCV
            // routine fill it with the vertices of the rotated rectangle.
            // (A managed reimplementation exists in cvgeometry.cpp, lines
            // 89-103, should the P/Invoke ever need replacing.)
            var corners = new CvPoint2D32f[4];
            CvInvoke.cvBoxPoints(box, corners);
            pt = corners;
        }
Exemplo n.º 30
0
        /// <summary>
        /// Per-frame processing entry point: runs Lucas-Kanade optical flow on the
        /// first frame and advances the tracking state machine
        /// (NoFeature -> AHMSetup -> Tracking), optionally drawing the tracked
        /// point onto the frame.
        /// </summary>
        /// <param name="frames">Incoming frames; frames[0] is consumed here, the
        /// full array is forwarded to the setup stage.</param>
        /// <exception cref="Exception">Thrown when the frame is null or its size
        /// does not match the configured capture resolution.</exception>
        public override void Process(System.Drawing.Bitmap [] frames)
        {
            // All per-frame work happens under the instance mutex so state
            // transitions cannot interleave with a concurrent Process call.
            lock (mutex)
            {
                Bitmap frame = frames[0];

                if (frame == null)
                {
                    throw new Exception("Frame is null!");
                }

                // The tracker is configured for a fixed capture resolution.
                if (frame.Width != imageSize.Width || frame.Height != imageSize.Height)
                {
                    throw new Exception("Invalid frame sizes");
                }

                _curFrame.setImage(frame);


                ProcessLucasKanade();

                bool drawOnFrame = true;
                if (state.Equals(AHMTrackingState.NoFeature))
                {
                    drawOnFrame = false;
                    if (!autoStartMode.Equals(AutoStartMode.None))
                    {
                        long eyeLocatorNewTickCount = Environment.TickCount;

                        // 10-second countdown before auto-start engages; once it
                        // elapses the eye locator scans frames for a blink.
                        if (eyeLocatorNewTickCount - eyeLocatorTickCount > 10000)
                        {
                            if (!autoStartEnded)
                            {
                                trackingSuiteAdapter.SendMessage("Please Blink");
                                autoStartEnded = true;
                            }

                            eyeLocator.AddImage(frame);
                            if (eyeLocator.TrackingPointsFound)
                            {
                                CvPoint2D32f p = new CvPoint2D32f();

                                // Pick the located point matching the configured
                                // auto-start target (left eye / right eye / nose-mouth).
                                if (autoStartMode.Equals(AutoStartMode.LeftEye))
                                {
                                    p = eyeLocator.LeftEyeTrackingPoint;
                                }
                                else if (autoStartMode.Equals(AutoStartMode.RightEye))
                                {
                                    p = eyeLocator.RightEyeTrackingPoint;
                                }
                                else if (autoStartMode.Equals(AutoStartMode.NoseMouth))
                                {
                                    p = eyeLocator.MouseTrackingPoint;
                                }

                                eyeLocator.Reset();

                                // Seed both the current and previous optical-flow
                                // track points with the located feature.
                                imagePoint.X = (int)p.x;
                                imagePoint.Y = (int)p.y;
                                _current_track_points[0].x = p.x;
                                _current_track_points[0].y = p.y;
                                _last_track_points[0].x    = p.x;
                                _last_track_points[0].y    = p.y;

                                SetState(AHMTrackingState.AHMSetup);
                            }
                        }
                        else
                        {
                            // Still counting down: report the remaining seconds.
                            int second = (int)Math.Round(((double)(10000 - (eyeLocatorNewTickCount - eyeLocatorTickCount))) / 1000.0);
                            trackingSuiteAdapter.SendMessage("Auto Start in " + second + " seconds");
                        }
                    }
                }
                else if (state.Equals(AHMTrackingState.Feature))
                {
                    // NOTE(review): when auto-start is enabled, a manually selected
                    // feature is discarded and the state falls back to NoFeature.
                    if (autoStartMode != AutoStartMode.None)
                    {
                        SetState(AHMTrackingState.NoFeature);
                    }
                }
                else if (state.Equals(AHMTrackingState.Tracking))
                {
                    ProcessAHM();
                }
                else if (state.Equals(AHMTrackingState.AHMSetup))
                {
                    // Setup returns true while still collecting data; false means
                    // it has finished and tracking can begin.
                    if (ahmSetup.Process(imagePoint, frames))
                    {
                        ahmSetup.DrawOnFrame(frames);
                        drawOnFrame = ahmSetup.DrawLucasKanade;
                    }
                    else
                    {
                        // Finish setup on a worker thread to keep this per-frame
                        // callback responsive.
                        Thread t = new Thread(new ThreadStart(SetupFinished));
                        t.Start();

                        //SetupFinished();
                        drawOnFrame = false;
                        SetState(AHMTrackingState.Tracking);
                    }
                }

                // Publish the current state to observers via the extra-info object.
                if (extraTrackingInfo == null)
                {
                    extraTrackingInfo = new AHMStateExtraInfo(state);
                }
                else
                {
                    ((AHMStateExtraInfo)extraTrackingInfo).TrackingState = state;
                }

                if (drawOnFrame)
                {
                    DrawPointOnFrame(frame);
                }
            }
        }
Exemplo n.º 31
0
 /// <summary>
 /// Native binding for OpenCV's cvLinearPolar: remaps an image into
 /// linear-polar coordinate space around <paramref name="center"/>.
 /// NOTE(review): the [DllImport] attribute is declared outside this excerpt.
 /// </summary>
 public static extern void cvLinearPolar(IntPtr src, IntPtr dst, CvPoint2D32f center, double maxRadius, [MarshalAs(UnmanagedType.I4)] Interpolation flags);
Exemplo n.º 32
0
        /// <summary>
        /// Scores the scaling accuracy of the depth-undistortion calibration:
        /// for every frame of a recorded motion-data session, detects a chessboard,
        /// converts neighboring corner pairs to real-world coordinates, and compares
        /// the measured corner-to-corner distances against the known board cell
        /// lengths. Results are visualized and summarized (min/max/avg/median ratio).
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Routed event data (unused).</param>
        private void buttonScalingScore_Click(object sender, RoutedEventArgs e)
        {
            int    cols, rows;
            double horizLength, vertLength;

            // Board geometry comes from the UI; bail out if it does not parse.
            if (!parseChessboardParameters(out cols, out rows, out horizLength, out vertLength))
            {
                return;
            }

            // Modified below (translated from Japanese: "以下改造")
            MotionDataHandler handler;
            string            path;

            if (openMotionData(out handler, out path))
            {
                CvMat displayMat1 = null;
                CvMat displayMat3 = null;
                CvMat displayMat4 = null;
                CvMat gray        = null;

                int length = handler.FrameCount;
                if (length == 0)
                {
                    return;
                }

                CvSize boardSize = new CvSize(cols, rows);
                CvSize imageSize = new CvSize();
                // Each pair holds (measured real-world distance, expected board length).
                List <Tuple <double, double> > pairs = new List <Tuple <double, double> >();
                CvPoint2D32f[] lastCorners           = null;

                // Walk color and depth frames in lockstep.
                IEnumerable <CvMat> colorImages, depthImages;
                Utility.LoadImages(handler.GetColorImagePaths(), out colorImages);
                Utility.LoadImages(handler.GetDepthImagePaths(), out depthImages);
                var images = colorImages.Zip(depthImages, (first, second) => Tuple.Create(first, second));

                foreach (Tuple <CvMat, CvMat> imagePair in images)
                {
                    CvMat imageMat = imagePair.Item1;
                    CvMat depthMat = imagePair.Item2;

                    if (displayMat4 == null)
                    {
                        displayMat4 = CvEx.InitCvMat(imageMat);
                    }

                    imageSize = new CvSize(imageMat.Cols, imageMat.Rows);
                    CvPoint2D32f[] corners;
                    int            count;
                    CvEx.InitCvMat(ref gray, imageMat, MatrixType.U8C1);
                    imageMat.CvtColor(gray, ColorConversion.RgbToGray);
                    if (gray.FindChessboardCorners(boardSize, out corners, out count, ChessboardFlag.AdaptiveThresh))
                    {
                        CvEx.CloneCvMat(ref displayMat1, imageMat);
                        // Refine corner positions to sub-pixel accuracy.
                        CvTermCriteria criteria = new CvTermCriteria(50, 0.01);
                        gray.FindCornerSubPix(corners, count, new CvSize(3, 3), new CvSize(-1, -1), criteria);
                        // Attach a filtered depth value to each corner; entries stay
                        // null where no reliable depth could be sampled.
                        CvPoint3D32f?[] cornerPoints = new CvPoint3D32f?[corners.Length];
                        for (int j = 0; j < corners.Length; j++)
                        {
                            CvPoint2D32f corner = corners[j];
                            double?      value  = CalcEx.BilateralFilterDepthMatSinglePixel(corner, depthMat, 100, 4, 9);
                            if (value.HasValue)
                            {
                                cornerPoints[j] = new CvPoint3D32f(corner.X, corner.Y, value.Value);
                            }
                        }
                        // Compare each corner against its right and lower neighbor.
                        for (int x = 0; x < cols; x++)
                        {
                            for (int y = 0; y < rows; y++)
                            {
                                if (!cornerPoints[x + y * cols].HasValue)
                                {
                                    continue;
                                }
                                CvPoint3D32f point1          = cornerPoints[x + y * cols].Value;
                                CvPoint3D64f undistortPoint1 = this.UndistortionData.GetRealFromScreenPos(point1, imageSize);
                                foreach (var offset in new[] { new { X = 1, Y = 0, D = horizLength }, new { X = 0, Y = 1, D = vertLength } })
                                {
                                    int dx = x + offset.X;
                                    int dy = y + offset.Y;
                                    if (dx >= cols || dy >= rows)
                                    {
                                        continue;
                                    }
                                    if (!cornerPoints[dx + dy * cols].HasValue)
                                    {
                                        continue;
                                    }

                                    // Scale = measured distance / expected length;
                                    // color-code the segment by how far it deviates from 1.
                                    CvPoint3D32f point2          = cornerPoints[dx + dy * cols].Value;
                                    CvPoint3D64f undistortPoint2 = this.UndistortionData.GetRealFromScreenPos(point2, imageSize);
                                    double       distance        = Math.Sqrt(CvEx.GetDistanceSq(undistortPoint1, undistortPoint2));
                                    double       scale           = distance / offset.D;
                                    CvColor      color           = CalcEx.HSVtoRGB(Math.Max(0, Math.Min(300, scale * 600 - 450)), scale, 2 - scale);
                                    displayMat4.DrawLine((int)point1.X, (int)point1.Y, (int)point2.X, (int)point2.Y, new CvScalar(color.R, color.G, color.B), 1, LineType.AntiAlias);
                                    pairs.Add(new Tuple <double, double>(distance, offset.D));
                                }
                            }
                        }
                        CvEx.DrawChessboardCornerFrame(displayMat1, boardSize, corners, new CvScalar(64, 128, 64));
                        displayMat1.DrawChessboardCorners(boardSize, corners, true);
                        lastCorners = corners;
                        putImage(displayMat1, PixelFormats.Rgb24);
                    }
                    else
                    {
                        CvEx.CloneCvMat(ref displayMat3, imageMat);
                        putImage(displayMat3, PixelFormats.Rgb24);
                    }
                }

                // NOTE(review): if no frame ever contained a detectable board,
                // displayMat1 is still null here and pairs is empty, so
                // InitCvMat/Undistort2 and the Min/Max/Average calls below
                // would fail — confirm upstream guarantees at least one detection.
                CvMat displayMat2 = CvEx.InitCvMat(displayMat1);
                displayMat1.Undistort2(displayMat2, this.UndistortionData.CameraStruct.CreateCvMat(), this.UndistortionData.DistortStruct.CreateCvMat(true));
                if (lastCorners != null)
                {
                    drawUndistortedCornerFrame(displayMat2, lastCorners, boardSize);
                }
                displayMat2.PutText(string.Format("Min: {0}", pairs.Min(x => x.Item1 / x.Item2)), new CvPoint(20, 20), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                displayMat2.PutText(string.Format("Max: {0}", pairs.Max(x => x.Item1 / x.Item2)), new CvPoint(20, 40), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                displayMat2.PutText(string.Format("Avg: {0}", pairs.Average(x => x.Item1 / x.Item2)), new CvPoint(20, 60), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                displayMat2.PutText(string.Format("Med: {0}", CalcEx.GetMedian(pairs.Select(x => x.Item1 / x.Item2).ToList())), new CvPoint(20, 80), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                putImage(displayMat4, PixelFormats.Rgb24);
                displayLabels();
            }
        }
Exemplo n.º 33
0
 /// <summary>
 /// Native binding for OpenCV's cvMinEnclosingCircle: finds the smallest circle
 /// containing the given point set, returning its center and radius by reference.
 /// NOTE(review): the [DllImport] attribute is declared outside this excerpt.
 /// </summary>
 public static extern bool cvMinEnclosingCircle(IntPtr points, ref CvPoint2D32f center, ref float radius);
Exemplo n.º 34
0
 /// <summary>
 /// Captures the detected eye locations for a blink-link tracking update.
 /// </summary>
 /// <param name="leftEyePoint">Left-eye position (sub-pixel image coordinates).</param>
 /// <param name="rightEyePoint">Right-eye position (sub-pixel image coordinates).</param>
 public BlinkLinkCMSExtraTrackingInfo(CvPoint2D32f leftEyePoint, CvPoint2D32f rightEyePoint)
 {
     // Truncate the sub-pixel coordinates to integer pixel positions.
     // (A stray empty statement ";;" after the second assignment was removed.)
     this.LeftEyePoint  = new Point((int)leftEyePoint.x, (int)leftEyePoint.y);
     this.RightEyePoint = new Point((int)rightEyePoint.x, (int)rightEyePoint.y);
 }
Exemplo n.º 35
0
        /// <summary>
        /// Camera worker loop: grabs frames, looks for a 5x4 chessboard and either
        /// perspective-warps an overlay picture onto the board (option == 1) or
        /// outlines the board's outer corners; when the full corner set is not
        /// found the grayscale frame is shown instead. Runs until the worker
        /// thread reference is cleared.
        /// Fixes over the original: the per-frame scratch images (and the never-used
        /// "disp" image) were leaked on every iteration; they are now disposed, and
        /// the capture, overlay picture and matrix are released when the loop ends.
        /// </summary>
        public void Run()
        {
            CvCapture cap = Cv.CreateCameraCapture(1);
            IplImage  pic = new IplImage("rump.jpg");

            Cv.Flip(pic, pic, FlipMode.Y);

            // Chessboard geometry: 5x4 inner corners = 20 expected points.
            int    width  = 5;
            int    height = 4;
            int    sqares = 20;
            CvSize size   = new CvSize(width, height);

            CvMat wMatrix = Cv.CreateMat(3, 3, MatrixType.F32C1);

            CvPoint2D32f[] corners = new CvPoint2D32f[sqares];

            int cornerCount;

            while (thread != null)
            {
                // Frame buffer is owned by the capture device; do not dispose it.
                IplImage img = Cv.QueryFrame(cap);

                Cv.Flip(img, img, FlipMode.Y);

                // Per-frame scratch images, released in the finally block below.
                IplImage cimg = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                IplImage nimg = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                IplImage gray = Cv.CreateImage(Cv.GetSize(img), img.Depth, 1);

                try
                {
                    bool found = Cv.FindChessboardCorners(img, size, out corners, out cornerCount, ChessboardFlag.AdaptiveThresh | ChessboardFlag.FilterQuads);

                    Cv.CvtColor(img, gray, ColorConversion.BgrToGray);

                    // Refine detected corners to sub-pixel accuracy.
                    CvTermCriteria criteria = new CvTermCriteria(CriteriaType.Epsilon, 30, 0.1);
                    Cv.FindCornerSubPix(gray, corners, cornerCount, new CvSize(11, 11), new CvSize(-1, -1), criteria);

                    if (cornerCount == sqares)
                    {
                        if (option == 1)
                        {
                            // Map the full overlay picture (quad q) onto the outer
                            // board corners (quad p; indices 0, 4, 19 and 15 are
                            // the four extreme corners of the 5x4 grid).
                            CvPoint2D32f[] p = new CvPoint2D32f[4];
                            CvPoint2D32f[] q = new CvPoint2D32f[4];

                            using (IplImage blank = Cv.CreateImage(Cv.GetSize(pic), BitDepth.U8, 3))
                            {
                                q[0].X = (float)pic.Width * 0;
                                q[0].Y = (float)pic.Height * 0;
                                q[1].X = (float)pic.Width;
                                q[1].Y = (float)pic.Height * 0;

                                q[2].X = (float)pic.Width;
                                q[2].Y = (float)pic.Height;
                                q[3].X = (float)pic.Width * 0;
                                q[3].Y = (float)pic.Height;

                                p[0].X = corners[0].X;
                                p[0].Y = corners[0].Y;
                                p[1].X = corners[4].X;
                                p[1].Y = corners[4].Y;

                                p[2].X = corners[19].X;
                                p[2].Y = corners[19].Y;
                                p[3].X = corners[15].X;
                                p[3].Y = corners[15].Y;

                                Cv.GetPerspectiveTransform(q, p, out wMatrix);

                                Cv.Zero(nimg);
                                Cv.Zero(cimg);

                                // Composite: warp the picture into nimg, build a
                                // board mask in cimg, then merge onto the live frame.
                                Cv.WarpPerspective(pic, nimg, wMatrix);
                                Cv.WarpPerspective(blank, cimg, wMatrix);
                                Cv.Not(cimg, cimg);

                                Cv.And(cimg, img, cimg);
                                Cv.Or(cimg, nimg, img);
                            }

                            Cv.Flip(img, img, FlipMode.Y);
                            Bitmap bm = BitmapConverter.ToBitmap(img);
                            bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                            pictureBox1.Image = bm;
                        }
                        else
                        {
                            // Outline the board's outer corners and overlay the
                            // detected corner pattern.
                            CvPoint[] p = new CvPoint[4];

                            p[0].X = (int)corners[0].X;
                            p[0].Y = (int)corners[0].Y;
                            p[1].X = (int)corners[4].X;
                            p[1].Y = (int)corners[4].Y;

                            p[2].X = (int)corners[19].X;
                            p[2].Y = (int)corners[19].Y;
                            p[3].X = (int)corners[15].X;
                            p[3].Y = (int)corners[15].Y;

                            Cv.Line(img, p[0], p[1], CvColor.Red, 2);
                            Cv.Line(img, p[1], p[2], CvColor.Green, 2);
                            Cv.Line(img, p[2], p[3], CvColor.Blue, 2);
                            Cv.Line(img, p[3], p[0], CvColor.Yellow, 2);

                            Cv.DrawChessboardCorners(img, size, corners, found);
                            Cv.Flip(img, img, FlipMode.Y);
                            Bitmap bm = BitmapConverter.ToBitmap(img);
                            bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                            pictureBox1.Image = bm;
                        }
                    }
                    else
                    {
                        // Board not fully detected: show the grayscale frame.
                        Cv.Flip(gray, gray, FlipMode.Y);
                        Bitmap bm = BitmapConverter.ToBitmap(gray);
                        bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                        pictureBox1.Image = bm;
                    }
                }
                finally
                {
                    cimg.Dispose();
                    nimg.Dispose();
                    gray.Dispose();
                }
            }

            // Release long-lived native resources once the loop ends.
            wMatrix.Dispose();
            pic.Dispose();
            cap.Dispose();
        }
Exemplo n.º 36
0
        /// <summary>
        /// Two-class SVM demo: samples 500 random 2-D points, labels each by its
        /// position relative to the curve f(x) (1 = above, 2 = otherwise), trains
        /// an RBF-kernel SVM on the normalized samples, and visualizes both the
        /// training set and the learned decision regions on a 300x300 plot.
        /// </summary>
        public void Run()
        {
            const int SampleCount = 500;
            const int PlotSize    = 300;

            // --- Training data ---
            var samples = new CvPoint2D32f[SampleCount];
            var labels  = new int[samples.Length];
            var rng     = new Random();

            for (int i = 0; i < labels.Length; i++)
            {
                double sx = rng.Next(0, PlotSize);
                double sy = rng.Next(0, PlotSize);
                samples[i] = new CvPoint2D32f(sx, sy);
                labels[i]  = (sy > f(sx)) ? 1 : 2;
            }

            // --- Visualize the training samples and the separating curve f(x) ---
            using (Mat trainPlot = Mat.Zeros(PlotSize, PlotSize, MatType.CV_8UC3))
            {
                for (int i = 0; i < samples.Length; i++)
                {
                    int px = (int)samples[i].X;
                    int py = (int)(PlotSize - samples[i].Y);   // flip Y so the origin is bottom-left
                    Scalar dotColor = (labels[i] == 1) ? Scalar.Red : Scalar.GreenYellow;
                    trainPlot.Circle(px, py, 2, dotColor, -1);
                }
                // draw f(x) as a polyline, one segment per pixel column
                for (int px = 1; px < PlotSize; px++)
                {
                    int yPrev = (int)(PlotSize - f(px - 1));
                    int yCur  = (int)(PlotSize - f(px));
                    trainPlot.Line(px - 1, yPrev, px, yCur, Scalar.LightBlue, 1);
                }
                Window.ShowImages(trainPlot);
            }

            // --- Train ---
            var trainData = new Mat(samples.Length, 2, MatType.CV_32FC1, samples);
            var trainRes  = new Mat(labels.Length, 1, MatType.CV_32SC1, labels);

            using (var svm = new CvSVM())
            {
                // normalize coordinates into [0, 1]
                trainData /= 300.0;

                var criteria = TermCriteria.Both(1000, 0.000001);
                var param    = new CvSVMParams(
                    SVMType.CSvc,
                    SVMKernelType.Rbf,
                    100.0, // degree
                    100.0, // gamma
                    1.0,   // coeff0
                    1.0,   // c
                    0.5,   // nu
                    0.1,   // p
                    null,
                    criteria);
                svm.Train(trainData, trainRes, null, null, param);

                // --- Predict a class for every pixel and paint the decision regions ---
                using (Mat regionPlot = Mat.Zeros(PlotSize, PlotSize, MatType.CV_8UC3))
                {
                    for (int px = 0; px < PlotSize; px++)
                    {
                        for (int py = 0; py < PlotSize; py++)
                        {
                            float[] feature    = { px / 300f, py / 300f };
                            var     featureMat = new CvMat(1, 2, MatrixType.F32C1, feature);
                            int     predicted  = (int)svm.Predict(featureMat);
                            var     pixelRect  = new CvRect(px, PlotSize - py, 1, 1);
                            if (predicted == 1)
                            {
                                regionPlot.Rectangle(pixelRect, Scalar.Red);
                            }
                            else if (predicted == 2)
                            {
                                regionPlot.Rectangle(pixelRect, Scalar.GreenYellow);
                            }
                        }
                    }
                    Window.ShowImages(regionPlot);
                }
            }
        }
Exemplo n.º 37
0
        /// <summary>
        /// Test handler: walks a recorded motion-data sequence, finds the chessboard
        /// in each color frame, samples the depth map at every corner, and measures
        /// the standard deviation of adjacent-corner 2-D distances. The frame with
        /// the smallest deviation is kept and displayed last.
        /// </summary>
        private void buttonTest0_Click(object sender, RoutedEventArgs e)
        {
            int    cols, rows;
            double horizLength, vertLength;

            // Bail out if the chessboard description entered in the UI is invalid.
            if (!parseChessboardParameters(out cols, out rows, out horizLength, out vertLength))
            {
                return;
            }

            // Modified section below.
            MotionDataHandler handler;
            string            path;

            if (openMotionData(out handler, out path))
            {
                CvMat displayMat1 = null;  // current annotated frame
                CvMat displayMat3 = null;  // frame shown when no chessboard is found
                CvMat displayMat4 = null;  // best (lowest stdev) frame so far
                CvMat gray        = null;
                int   length      = handler.FrameCount;
                if (length == 0)
                {
                    return;
                }

                CvSize boardSize        = new CvSize(cols, rows);
                CvSize imageSize        = new CvSize();
                double minVarDistance2d = double.MaxValue;

                // Pair up color and depth frames index-by-index.
                IEnumerable <CvMat> colorImages, depthImages;
                Utility.LoadImages(handler.GetColorImagePaths(), out colorImages);
                Utility.LoadImages(handler.GetDepthImagePaths(), out depthImages);
                var images = colorImages.Zip(depthImages, (first, second) => Tuple.Create(first, second));

                foreach (Tuple <CvMat, CvMat> imagePair in images)
                {
                    CvMat imageMat = imagePair.Item1;
                    CvMat depthMat = imagePair.Item2;

                    if (displayMat4 == null)
                    {
                        displayMat4 = CvEx.InitCvMat(imageMat);
                    }

                    imageSize = new CvSize(imageMat.Cols, imageMat.Rows);
                    CvSize depthUserSize = new CvSize(depthMat.Cols, depthMat.Rows);

                    CvPoint2D32f[] corners;
                    int            count;
                    CvEx.InitCvMat(ref gray, imageMat, MatrixType.U8C1);
                    imageMat.CvtColor(gray, ColorConversion.RgbToGray);
                    if (gray.FindChessboardCorners(boardSize, out corners, out count, ChessboardFlag.AdaptiveThresh))
                    {
                        CvEx.CloneCvMat(ref displayMat1, imageMat);
                        // Refine corner positions to sub-pixel accuracy.
                        CvTermCriteria criteria = new CvTermCriteria(50, 0.01);
                        gray.FindCornerSubPix(corners, count, new CvSize(3, 3), new CvSize(-1, -1), criteria);
                        CvPoint3D32f?[] cornerPoints = new CvPoint3D32f?[corners.Length];
                        for (int j = 0; j < corners.Length; j++)
                        {
                            // NOTE(review): corners are shifted by (-10, -10) before the depth
                            // lookup — presumably to compensate for color/depth sensor offset;
                            // confirm against the capture setup.
                            CvPoint2D32f corner = new CvPoint2D32f(corners[j].X - 10, corners[j].Y - 10);
                            double?      value  = CvEx.Get2DSubPixel(depthMat, corner, 0);
                            if (value.HasValue)
                            {
                                double depth = UndistortionData.UndistortDepth(corner.X, corner.Y, value.Value, depthUserSize);
                                cornerPoints[j] = new CvPoint3D32f(corner.X, corner.Y, depth);
                            }
                        }
                        // Collect 2-D distances between horizontally and vertically adjacent
                        // corners (skipping pairs where either corner has no depth value).
                        List <double> distance2dList = new List <double>();
                        for (int x = 0; x < cols; x++)
                        {
                            for (int y = 0; y < rows; y++)
                            {
                                if (!cornerPoints[x + y * cols].HasValue)
                                {
                                    continue;
                                }
                                int nextX = x + 1;
                                if (nextX < cols)
                                {
                                    if (!cornerPoints[nextX + y * cols].HasValue)
                                    {
                                        continue;
                                    }
                                    CvPoint3D32f point     = cornerPoints[x + y * cols].Value;
                                    CvPoint3D32f nextPoint = cornerPoints[nextX + y * cols].Value;
                                    distance2dList.Add(Math.Sqrt(Math.Pow(point.X - nextPoint.X, 2) + Math.Pow(point.Y - nextPoint.Y, 2)));
                                }
                                int nextY = y + 1;
                                if (nextY < rows)
                                {
                                    if (!cornerPoints[x + nextY * cols].HasValue)
                                    {
                                        continue;
                                    }
                                    CvPoint3D32f point     = cornerPoints[x + y * cols].Value;
                                    CvPoint3D32f nextPoint = cornerPoints[x + nextY * cols].Value;
                                    distance2dList.Add(Math.Sqrt(Math.Pow(point.X - nextPoint.X, 2) + Math.Pow(point.Y - nextPoint.Y, 2)));
                                }
                            }
                        }
                        if (distance2dList.Count >= 2)
                        {
                            // Show "current stdev / best stdev so far" in the corner of the frame.
                            double stdevDistance2d = CalcEx.GetStdDev(distance2dList);
                            displayMat1.PutText(string.Format("{0:0.00}/{1:0.00}", stdevDistance2d, minVarDistance2d), new CvPoint(0, 20), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 0));
                            // Annotate each corner with its depth deviation from the board average,
                            // both in absolute units and as a percentage.
                            double avgDepth = cornerPoints.Where(p => p.HasValue).Select(p => p.Value.Z).Average();
                            for (int x = 0; x < cols; x++)
                            {
                                for (int y = 0; y < rows; y++)
                                {
                                    if (!cornerPoints[x + y * cols].HasValue)
                                    {
                                        continue;
                                    }
                                    CvPoint3D32f point = cornerPoints[x + y * cols].Value;
                                    displayMat1.PutText((point.Z - avgDepth).ToString("0.00"), new CvPoint((int)point.X, (int)point.Y), new CvFont(FontFace.HersheyPlain, 0.6, 0.6), new CvScalar(255, 0, 0));
                                    displayMat1.PutText(((point.Z - avgDepth) / avgDepth * 100).ToString("0.000"), new CvPoint((int)point.X, (int)point.Y + 12), new CvFont(FontFace.HersheyPlain, 0.6, 0.6), new CvScalar(0, 255, 255));
                                }
                            }
                            //displayMat1.DrawChessboardCorners(boardSize, corners, true);

                            // Keep the frame with the lowest corner-distance deviation.
                            if (stdevDistance2d < minVarDistance2d)
                            {
                                minVarDistance2d = stdevDistance2d;
                                CvEx.CloneCvMat(ref displayMat4, displayMat1);
                            }
                            //System.Threading.Thread.Sleep(500);
                        }
                        putImage(displayMat1, PixelFormats.Rgb24);
                    }
                    else
                    {
                        // No chessboard in this frame: show it unannotated.
                        CvEx.CloneCvMat(ref displayMat3, imageMat);
                        putImage(displayMat3, PixelFormats.Rgb24);
                    }
                }

                // Finally show the best frame of the whole sequence.
                putImage(displayMat4, PixelFormats.Rgb24);
                displayLabels();
            }
        }
Exemplo n.º 38
0
        // Perspective transform
        /// <summary>
        /// Warps <paramref name="srcImg"/> with the perspective transform that maps
        /// the quad <paramref name="src_Pt"/> onto <paramref name="dst_Pt"/>.
        /// </summary>
        /// <param name="srcImg">Source image (unchanged).</param>
        /// <param name="src_Pt">Four source quad corners.</param>
        /// <param name="dst_Pt">Four destination quad corners.</param>
        /// <returns>A new image containing the warped result.</returns>
        public Mat PerspectiveProject(Mat srcImg, CvPoint2D32f[] src_Pt, CvPoint2D32f[] dst_Pt)
        {
            // Clone directly; the original code allocated a Mat that was
            // immediately discarded by this reassignment.
            Mat dstImg = srcImg.Clone();
            // 3x3 homography from the two point quads.
            CvMat perspective_matrix = Cv.GetPerspectiveTransform(src_Pt, dst_Pt);
            // Cubic interpolation; pixels with no source mapping are filled blue (255, 0, 0).
            Cv.WarpPerspective(srcImg.ToCvMat(), dstImg.ToCvMat(), perspective_matrix, Interpolation.Cubic, new CvScalar(255, 0, 0));

            return dstImg;
        }
Exemplo n.º 39
0
        /// <summary>
        /// Heavy per-frame vision pipeline: grayscale conversion and contrast
        /// enhancement, thresholding, morphology, contour extraction, Hough line
        /// detection, 4-corner tracking (via the oldPt/final4P fields), and finally
        /// warping an overlay image onto the tracked quad. Mutates several fields
        /// (width, height, temp, oldPt, outputQuad) as side effects.
        /// </summary>
        /// <param name="inputMat">Input frame; per the original note it MUST be 24/32 bit.</param>
        /// <returns>The processed frame (the same object as <paramref name="inputMat"/>).</returns>
        // => inputMat MUST be 24/32 bit
        private CvMat processFrame(CvMat inputMat)
        {
            // return "inputMat" after lots. LOTS. Of processing

            width  = inputMat.Cols;
            height = inputMat.Rows;

            // taking out 4% of the input's edges: sounds wrong
#if false
            // I have no idea what on earth is the purpose of this:
            //CvMat temp2 = inputMat( new CvRect( inputMat.Cols / 25, inputMat.Cols / 25, inputMat.Cols - 2 * (inputMat.Cols / 25), inputMat.Rows - 2 * (inputMat.Rows / 25) ) );
            //resize( temp2, temp2, inputMat.size() );
            //temp2.copyTo( inputMat );
            int    borderX = inputMat.Cols / 25;          // 4% of original
            int    borderY = inputMat.Rows / 25;
            CvRect roi     = new CvRect(borderX, borderY, inputMat.Cols - 2 * borderX, inputMat.Rows - 2 * borderY);
            CvMat  temp2   = inputMat.GetSubRect(out temp2, roi);            // stupid to pass "out temp2"?
            inputMat = temp2;
            // =TODO : What? temp2.Copy( inputMat );
            // is it really required to remove 4% of the input image's edges?
#endif

            // Convert to single-channel float grayscale in [0, 1].
            CvMat inputMat_grey;
            {
                // TODO : looks like a waste to make two conversions from inputMat to _grey, instead of 1
                // since OpenCV doesn't support it, it could be made manually
                CvMat inputMat_grey8 = MatOps.ConvertChannels(inputMat);
                inputMat_grey = MatOps.ConvertElements(inputMat_grey8, MatrixType.F32C1, 1.0 / 255.0);
            }

            // NOTE : IBO seems to give good contrast with certain images, but with bbox7, it is just disastrous.
            //MatOps.NewWindowShow( inputMat_grey );
            //inputMat_grey = Filters.IBO( inputMat_grey ); // inputMat_grey = 32f
            //MatOps.NewWindowShow( inputMat_grey );
            inputMat_grey = MatOps.ConvertElements(inputMat_grey, MatrixType.U8C1, 255);               // inputMat_grey = 8u
            // was: SLOW : Filters.ContrastEnhancement( inputMat_grey ); // NOTE : not needed AFTER IBO
            // NOTE : Contrast Enhancement2 may NOT be needed AT ALL, at this point at least, ANYWAY!!!
            Filters.ContrastEnhancement2(inputMat_grey);               // NOTE : certainly NOT needed AFTER IBO
            MatOps.NewWindowShow(inputMat_grey);

            // Binarize with an automatically chosen threshold (inverted: features become white).
            // mask passed originally in method below was all white, so I optimized it out. Passing the number of pixels was also dumb-o.
            double thresh = Filters.NeighborhoodValleyEmphasis(inputMat_grey);
            Cv.Threshold(inputMat_grey, inputMat_grey, thresh, 255, ThresholdType.BinaryInv);

            // Morphological open (erode then dilate) to remove speckle noise.
            IplConvKernel element = new IplConvKernel(3, 3, 1, 1, ElementShape.Cross);
            Cv.Erode(inputMat_grey, inputMat_grey, element);
            Cv.Dilate(inputMat_grey, inputMat_grey, element);
            MatOps.NewWindowShow(inputMat_grey);

            // TODO : check if check is required
            if (inputMat_grey.ElemType != MatrixType.U8C1)
            {
                inputMat_grey = MatOps.ConvertElements(inputMat_grey, MatrixType.U8C1, 255.0);
            }

            // =======
            // is this just a test?
            CvPoint[] newPtV = Filters.DistillContours(inputMat_grey, 5, Const.PointZero);
            CvMat     imageDest;
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq <CvPoint> updateContours = CvSeq <CvPoint> .FromArray(newPtV, SeqType.Contour, storage);

                imageDest = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1);
                Cv.DrawContours(imageDest, updateContours, Const.ScalarWhite, 0, 100, 16);
            }
            // =======

            kawane(newPtV);               // updates thresholdDist, minMaskY, final4P

            //*******************************************set a greater contour for estimation of the missing points*******************************//

            // =======
            newPtV = Filters.DistillContours(inputMat_grey, 100, Const.PointZero);
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq <CvPoint> updateContours = CvSeq <CvPoint> .FromArray(newPtV, SeqType.Contour, storage);

                imageDest = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1);
                Cv.DrawContours(imageDest, updateContours, Const.ScalarWhite, 0, 100, 1, LineType.AntiAlias);
            }
            // =======

            // Build a filled polygon mask from the larger contour and restrict
            // edge detection to that region.
            CvMat mask1 = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1, 0);
            Cv.FillConvexPoly(mask1, newPtV, Const.ScalarWhite, 0, 0);

            temp = MatOps.ConvertChannels(inputMat);
            temp.Copy(imageDest, mask1);
            Cv.Canny(imageDest, imageDest, 150, 300, ApertureSize.Size3);
            IplConvKernel element2 = new IplConvKernel(3, 3, 1, 1, ElementShape.Rect);
            Cv.Dilate(imageDest, imageDest, element2);
            Cv.Erode(imageDest, imageDest, element2);

            // Probabilistic Hough transform on the edge map, then extend the segments.
            CvLineSegmentPoint[] lines = Cv2.HoughLinesP(new Mat(imageDest), 1, Cv.PI / 180 /*NOTE : 1 degree angle*/, 50, 50, 50); // TODO : those 50s..?
            extendLines(lines, 350);                                                                                                // TODO : This idea sounds arbitary? And why 350? At least some percentage?

            // draw extended lines
            for (int i = 0; i < lines.Length; ++i)
            {
                CvLineSegmentPoint l = lines[i];
                Cv.Line(imageDest, l.P1, l.P2, Const.ScalarWhite, 1, LineType.AntiAlias);
            }

            Cv.Dilate(imageDest, imageDest, element2);               // TODO : FIX : Dilate again?!

            // another huge function here...
            fourPoints(lines);

            ////////////

            //********************************************************************* replace estimate points with mask corners ********//
            if (oldPt.Count != 0)
            {
                //**
                // BEWARE : great use of the English language following right below:
                // test for each and every one of the last slice delete each one of all the revisited of the above and estimate for only the best the off topic adapt
                //**
                // For each newly estimated corner (final4P), find the index of the
                // nearest previously tracked point (oldPt).
                List <int> positions = new List <int>(final4P.Count);
                for (int i = 0; i < final4P.Count; ++i)
                {
                    positions.Add(-1);                       // "initialize" positions[i]
                    double distmin = 10000;
                    for (int j = 0; j < oldPt.Count; ++j)
                    {
                        double distAB = PointOps.Norm(oldPt[j] - final4P[i]);
                        if (distAB < distmin)
                        {
                            distmin      = distAB;
                            positions[i] = j;
                        }
                    }
                }
                // Accept a new corner only if it is within thresholdDist of its old
                // position; otherwise keep the previous point (rejects outliers).
                int flagFrCounter = 0;
                for (int i = 0; i < final4P.Count; ++i)
                {
                    double distA = PointOps.Norm(oldPt[positions[i]] - final4P[i]);
                    //********************* threshold that defines the maximum search region; otherwise keep the previous point *******//

                    if (distA < thresholdDist)                     //if(distA<80)
                    {
                        oldPt[positions[i]] = final4P[i];
                        --flagFrCounter;
                    }
                    ++flagFrCounter;
                }
                if (reset)
                {
                    numFrames = 0;
                    oldPt.Clear();
                    final4P.Clear();
                }
            }
            //pointsb[0]=thresholdDist;
            //****************************************************************************//

            // Draw the tracked points for debugging, then write the result back to inputMat.
            for (int i = 0; i < oldPt.Count; ++i)
            {
                Cv.Circle(temp, oldPt[i], 2, Const.ScalarRed, 3);
            }
            MatOps.Convert8To24(temp).Copy(inputMat);
            //MatOps.ConvertChannels( temp, ColorConversion.GrayToBgr ).Copy( inputMat );
            //temp.Copy( inputMat );



            //******************************************************OVERLAY IMAGE***********************************************//////
            if (oldPt.Count == 0)
            {
                return(inputMat);                // end of line
            }
            CvMat black2;
            if (overlay != null)
            {
                black2 = overlay.Clone();                                   //=imread("cubes.jpg");
                Cv.Resize(black2, inputMat, Interpolation.NearestNeighbor); // TODO : check if interpolation type is appropriate
            }
            else
            {
                black2 = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C3);
            }

            // Order the tracked points: two topmost first, then the one horizontally
            // closest to the second, then the rest.
            List <CvPoint> tempPoint = new List <CvPoint>(4);
            //vector<Point> tempPoint;
            int pp = 0;

            // BEWARE : the guy is copy/pasting needlessly?
            int mini = 1000000;
            for (int i = 0; i < oldPt.Count; ++i)
            {
                if (oldPt[i].Y < mini)
                {
                    mini = oldPt[i].Y;
                    pp   = i;
                }
            }
            tempPoint.Add(oldPt[pp]);
            mini = 1000000;
            for (int i = 0; i < oldPt.Count; ++i)
            {
                if (oldPt[i].Y < mini && oldPt[i] != tempPoint[0])
                {
                    mini = oldPt[i].Y;
                    pp   = i;
                }
            }
            tempPoint.Add(oldPt[pp]);
            mini = 1000000;
            for (int i = 0; i < oldPt.Count; ++i)
            {
                int tempmini = Math.Abs(oldPt[i].X - tempPoint[1].X);
                if (tempmini < mini && oldPt[i] != tempPoint[0] && oldPt[i] != tempPoint[1])
                {
                    mini = tempmini;
                    pp   = i;
                }
            }
            tempPoint.Add(oldPt[pp]);

            for (int i = 0; i < oldPt.Count; ++i)
            {
                CvPoint pt    = oldPt[i];
                bool    found = false;
                for (int j = 0; j < tempPoint.Count; ++j)
                {
                    if (tempPoint[j] == pt)
                    {
                        found = true; break;
                    }
                }
                if (!found)
                {
                    tempPoint.Add(pt);
                }
            }

            // only keep up to 4 points
            List <CvPoint> co_ordinates = new List <CvPoint>(4);
            {
                int maxIndex = Math.Min(4, tempPoint.Count);
                for (int i = 0; i < maxIndex; ++i)
                {
                    co_ordinates.Add(tempPoint[i]);
                }
            }

            // lost me...
            // NOTE(review): outputQuad[0] == outputQuad[2] presumably detects the
            // "not yet initialized" quad (all points equal) — confirm.
            if (outputQuad[0] == outputQuad[2])
            {
                {
                    int maxIndex = Math.Min(4, tempPoint.Count);
                    for (int i = 0; i < maxIndex; ++i)
                    {
                        outputQuad[i] = tempPoint[i];
                    }
                }
            }
            else
            {
                // Greedy nearest-neighbor assignment of the new points to the
                // existing quad corners, consuming each candidate once.
                CvPoint2D32f rr;
                for (int i = 0; i < 4; ++i)
                {
                    List <double> dist = new List <double>(tempPoint.Count);
                    for (int j = 0; j < tempPoint.Count; ++j)
                    {
                        rr = tempPoint[j];
                        dist.Add(PointOps.Norm(outputQuad[i] - rr));
                    }

                    double minimumDist = dist.Min();
                    int    min_pos     = Utils.FindIndex(dist, minimumDist);
                    if (tempPoint.Count > 0)
                    {
                        outputQuad[i] = tempPoint[min_pos];
                        tempPoint.RemoveAt(min_pos);
                    }
                }
            }


            // The 4 points where the mapping is to be done , from top-left in clockwise order
            inputQuad[0] = new CvPoint2D32f(0, 0);
            inputQuad[1] = new CvPoint2D32f(inputMat.Cols - 1, 0);
            inputQuad[2] = new CvPoint2D32f(inputMat.Cols - 1, inputMat.Rows - 1);
            inputQuad[3] = new CvPoint2D32f(0, inputMat.Rows - 1);
            //Input and Output Image;


            // Get the Perspective Transform Matrix i.e. lambda (2D warp transform)
            // Lambda Matrix
            CvMat lambda = Cv.GetPerspectiveTransform(inputQuad, outputQuad);
            // Apply this Perspective Transform to the src image
            // - get a "top-down" view of the supposedly box-y area
            Cv.WarpPerspective(black2, black2, lambda, Interpolation.Cubic, Const.ScalarBlack);
            // see nice explanation : http://www.pyimagesearch.com/2014/08/25/4-point-opencv-getperspective-transform-example/


            // Build a mask over the tracked quad and alpha-blend the warped overlay into it.
            CvMat maskOV = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1, Const.ScalarBlack);
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq <CvPoint> updateContours = CvSeq <CvPoint> .FromArray(co_ordinates, SeqType.Contour, storage);

                imageDest = new CvMat(inputMat.Rows, inputMat.Cols, MatrixType.U8C1);
                Cv.DrawContours(maskOV, updateContours, Const.ScalarWhite, 0, 100, 16);
                //drawContours( maskOV, co_ordinates, 0, Scalar( 255 ), CV_FILLED, 8 );
            }

            // 80% overlay / 20% original inside the masked quad.
            double alpha = 0.8;
            double beta  = (1.0 - alpha);
            Cv.AddWeighted(black2, alpha, inputMat, beta, 0.0, black2);
            black2.Copy(inputMat, maskOV);

            return(inputMat);
        }
Exemplo n.º 40
0
        /// <summary>
        /// Maps points from canvas coordinates into image coordinates by scaling
        /// each axis with the image/canvas size ratio.
        /// </summary>
        /// <param name="srcPt">Points in canvas coordinates.</param>
        /// <returns>A new array of the same length, in image coordinates.</returns>
        CvPoint2D32f[] changePtRange(CvPoint2D32f[] srcPt)
        {
            var mapped = new CvPoint2D32f[srcPt.Length];

            for (int idx = 0; idx < srcPt.Length; idx++)
            {
                var scaledX = srcPt[idx].X * this.imageWidth / this.canvasWidth;
                var scaledY = srcPt[idx].Y * this.imageHeight / this.canvasHeight;
                mapped[idx] = new CvPoint2D32f(scaledX, scaledY);
            }

            return mapped;
        }
Exemplo n.º 41
0
 // P/Invoke declaration for OpenCV's cvLogPolar (log-polar image remapping).
 // NOTE(review): the [DllImport] attribute is not visible here — presumably it
 // sits on an adjacent line stripped by the snippet extraction; confirm before use.
 public static extern void cvLogPolar(IntPtr src, IntPtr dst, CvPoint2D32f center, double M, Interpolation flags);
Exemplo n.º 42
0
        /// <summary>
        /// Processes one video frame and returns the tracked quad's corner
        /// coordinates flattened as [x0, y0, x1, y1, x2, y2, x3, y3].
        /// </summary>
        /// <param name="input">Frame to process (run through processFrame).</param>
        /// <param name="output">Optional texture that receives the processed frame.</param>
        /// <returns>Eight floats: the four corner coordinates, from oldPt when the
        /// tracker is valid, otherwise from outputQuad.</returns>
        public float[] calcNextFrame(CvMat input, Texture2D output = null)
        {
            float[] points = new float[8];             // return this

            ++numFrames;

            //input.Flip(input, FlipMode.Y); // TODO : to check if this is really needed. If yes, it SHOULD go inside "processFrame"
            // src_img == dest_img

            input = processFrame(input);

            if (numFrames < 20)
            {
                // Warm-up period: keep the output quad zeroed until the tracker settles.
                // SAY CHEEEESE
                //overlay.release();
                //putText( dest_img, "HOLD ", cvPoint( 30, 30 ),
                //		FONT_HERSHEY_COMPLEX_SMALL, 1, cvScalar( 0, 255, 0, 0 ), 1, CV_AA );
                for (int i = 0; i < 4; i++)
                {
                    outputQuad[i] = new CvPoint2D32f(0, 0);
                }
            }


            if (reset)
            {
                // SAY RESEEEEET
                //overlay.release();
                //putText( dest_img, "RESTART tracker", cvPoint( 30, dest_img.rows / 2 ),
                //		FONT_HERSHEY_COMPLEX_SMALL, 1, cvScalar( 255, 0, 0, 0 ), 1, CV_AA );
            }

            //input.Flip(input, FlipMode.Y); // TODO : to check if this is really needed

            if (output != null)
            {
                UUtils.CopyMatToTexture2D(input, output);
            }

            if (oldPtValid)
            {
                int j = 0;
                for (int i = 0; i < 4; ++i)
                {
                    points[j]     = oldPt[i].X;
                    points[j + 1] = oldPt[i].Y;
                    // BUG FIX: was "j++", which advanced the write index by only one
                    // per point — each Y was overwritten by the next point's X and
                    // points[4..7] were never filled. Step by two, matching the
                    // outputQuad branch below.
                    j += 2;
                }
            }
            else
            {
                // this points[] is dumb as hell, return something more meaningful
                for (int i = 0; i < 8; i += 2)
                {
                    points[i]     = outputQuad[i >> 1].X;
                    points[i + 1] = outputQuad[i >> 1].Y;
                }
            }

            return(points);
        }
Exemplo n.º 43
0
 // P/Invoke declaration for OpenCV's cvPointPolygonTest (point-in-polygon test;
 // returns sign or signed distance depending on measure_dist).
 // NOTE(review): the [DllImport] attribute is not visible here — presumably it
 // sits on an adjacent line stripped by the snippet extraction; confirm before use.
 public static extern double cvPointPolygonTest(IntPtr contour, CvPoint2D32f pt, int measure_dist);
Exemplo n.º 44
0
        // Build the training data and run SVM training.
        /// <summary>
        /// Converts the given feature list into OpenCV matrices, visualizes the
        /// training data, runs a LibSVM sanity check on an external dataset
        /// (wine.txt), and finally trains the member SVM on the feature/label
        /// matrices.
        /// </summary>
        /// <param name="FeatureList">Feature values (two components each) with their class IDs.</param>
        public void TrainingExec(List <FaceFeature.FeatureValue> FeatureList)
        {
            // Copy the features into a flat array: two values form one sample,
            // so the array holds 2 floats per list entry.
            double[] feature_array = new double[2 * FeatureList.Count];

            // Flatten the feature list into an array the SVM can consume.
            SetFeatureListToArray(FeatureList, ref feature_array);
            CvPoint2D32f[] feature_points = new CvPoint2D32f[feature_array.Length / 2];
            int            id             = 0;

            // Re-pack consecutive (x, y) pairs as 2-D points.
            for (int i = 0; i < feature_array.Length / 2; i++)
            {
                feature_points[id].X = (float)feature_array[i * 2];
                feature_points[id].Y = (float)feature_array[i * 2 + 1];
                id++;
            }
            CvMat dataMat = new CvMat(feature_points.Length, 2, MatrixType.F32C1, feature_points, true);

            // Class label numbers, one per sample.
            int[] id_array = new int[FeatureList.Count];
            for (int i = 0; i < id_array.Length; i++)
            {
                id_array[i] = FeatureList[i].ID;
            }
            CvMat resMat = new CvMat(id_array.Length, 1, MatrixType.S32C1, id_array, true);


            // Visualize the data and responses.
            CvPoint2D32f[] points = new CvPoint2D32f[id_array.Length];
            int            idx    = 0;

            for (int i = 0; i < id_array.Length; i++)
            {
                points[idx].X = (float)feature_array[i * 2];
                points[idx].Y = (float)feature_array[i * 2 + 1];
                idx++;
            }

            // Plot the training data.
            Debug_DrawInputFeature(points, id_array);

            // Debug: dump the features and IDs that will be trained.
            OutPut_FeatureAndID(points, id_array);

            // LibSVM sanity test:
            // load the training data.
            // NOTE(review): trains and evaluates on the same file, so the reported
            // accuracy is optimistic — confirm this is intentional for debugging.
            SVMProblem problem     = SVMProblemHelper.Load(@"wine.txt");
            SVMProblem testProblem = SVMProblemHelper.Load(@"wine.txt");

            SVMParameter parameter = new SVMParameter();

            parameter.Type   = LibSVMsharp.SVMType.C_SVC;
            parameter.Kernel = LibSVMsharp.SVMKernelType.RBF;
            parameter.C      = 1;
            parameter.Gamma  = 1;

            SVMModel model = SVM.Train(problem, parameter);

            double[] target = new double[testProblem.Length];


            for (int i = 0; i < testProblem.Length; i++)
            {
                target[i] = SVM.Predict(model, testProblem.X[i]);
            }
            double accuracy = SVMHelper.EvaluateClassificationProblem(testProblem, target);


            // Set up the OpenCV SVM.
            CvTermCriteria criteria = new CvTermCriteria(1000, 0.000001);
            CvSVMParams    param    = new CvSVMParams(
                OpenCvSharp.CPlusPlus.SVMType.CSvc,
                OpenCvSharp.CPlusPlus.SVMKernelType.Rbf,
                10.0,            // degree
                100.0,           // gamma (tuning parameter)
                1.0,             // coeff0
                10.0,            // c (tuning parameter)
                0.5,             // nu
                0.1,             // p
                null,
                criteria);

            // Run the training.
            svm.Train(dataMat, resMat, null, null, param);

            Debug_DispPredict();
        }
Exemplo n.º 45
0
        /// <summary>
        /// Click handler that calibrates the real-world scale and depth offset.
        /// It walks every recorded color/depth frame pair, finds chessboard
        /// corners in the color image, samples a filtered depth value at each
        /// corner, and feeds the collected 3-D corner sets to
        /// <c>UndistortionData.CalibrateRealScaleAndOffset</c>.  The last frame
        /// with a detected board is then undistorted and shown with the computed
        /// XScale / YScale / ZOffset values drawn on it.
        /// </summary>
        private void buttonCalibrateScaleOffset_Click(object sender, RoutedEventArgs e)
        {
            int    cols, rows;
            double horizLength, vertLength;

            // Chessboard geometry comes from the UI; abort on invalid input.
            if (!parseChessboardParameters(out cols, out rows, out horizLength, out vertLength))
            {
                return;
            }

            // Modified from here on (translated from the original note "以下改造")
            MotionDataHandler handler;
            string            path;

            if (openMotionData(out handler, out path))
            {
                CvMat displayMat1 = null; // last color frame where a board was found
                CvMat displayMat3 = null; // last color frame where detection failed
                CvMat gray        = null; // reusable grayscale work buffer

                if (ProgressData.DoAction(progress =>
                {
                    int length = handler.FrameCount;
                    if (length == 0)
                    {
                        return;
                    }
                    progress.InitProgress("Find Chessboard...", length * 2);

                    CvSize boardSize = new CvSize(cols, rows);
                    // One entry per frame with a detected board; an element is null
                    // when no valid depth could be sampled for that corner.
                    List <CvPoint3D32f?[]> list = new List <CvPoint3D32f?[]>();
                    CvSize imageSize = new CvSize();
                    CvPoint2D32f[] lastCorners = null;

                    IEnumerable <CvMat> colorImages, depthImages;
                    Utility.LoadImages(handler.GetColorImagePaths(), out colorImages);
                    Utility.LoadImages(handler.GetDepthImagePaths(), out depthImages);
                    // Pair each color frame with its corresponding depth frame.
                    var images = colorImages.Zip(depthImages, (first, second) => Tuple.Create(first, second));

                    foreach (Tuple <CvMat, CvMat> imagePair in images)
                    {
                        progress.CurrentValue++;

                        CvMat imageMat = imagePair.Item1;
                        CvMat depthMat = imagePair.Item2;
                        imageSize = new CvSize(imageMat.Cols, imageMat.Rows);
                        CvPoint2D32f[] corners;
                        int count;
                        CvEx.InitCvMat(ref gray, imageMat, MatrixType.U8C1);
                        imageMat.CvtColor(gray, ColorConversion.RgbToGray);
                        if (gray.FindChessboardCorners(boardSize, out corners, out count, ChessboardFlag.AdaptiveThresh))
                        {
                            CvEx.CloneCvMat(ref displayMat1, imageMat);
                            // Refine corner positions to sub-pixel accuracy.
                            CvTermCriteria criteria = new CvTermCriteria(50, 0.01);
                            gray.FindCornerSubPix(corners, count, new CvSize(3, 3), new CvSize(-1, -1), criteria);
                            CvPoint3D32f?[] cornerPoints = new CvPoint3D32f?[corners.Length];
                            for (int j = 0; j < corners.Length; j++)
                            {
                                CvPoint2D32f corner = corners[j];
                                // Sample depth at the corner with a bilateral filter to
                                // reject noise.  NOTE(review): the meaning of the literals
                                // (100, 4, 9) is defined in CalcEx — confirm there.
                                double?value = CalcEx.BilateralFilterDepthMatSinglePixel(corner, depthMat, 100, 4, 9);
                                if (value.HasValue)
                                {
                                    cornerPoints[j] = new CvPoint3D32f(corner.X, corner.Y, value.Value);
                                }
                            }
                            list.Add(cornerPoints);
                            CvEx.DrawChessboardCornerFrame(displayMat1, boardSize, corners, new CvScalar(64, 128, 64));
                            displayMat1.DrawChessboardCorners(boardSize, corners, true);
                            lastCorners = corners;
                            //putImage(displayMat1, PixelFormats.Bgr24);
                        }
                        else
                        {
                            CvEx.CloneCvMat(ref displayMat3, imageMat);
                            //putImage(displayMat3, PixelFormats.Bgr24);
                        }
                    }
                    progress.SetProgress("Scale Offset Calibrating...", length);

                    // Run the actual calibration over all collected corner sets.
                    this.UndistortionData.CalibrateRealScaleAndOffset(list, cols, rows, horizLength, vertLength, imageSize);
                    // Undistort the last detection and overlay the results.
                    CvMat displayMat2 = CvEx.InitCvMat(displayMat1);
                    displayMat1.Undistort2(displayMat2, this.UndistortionData.CameraStruct.CreateCvMat(), this.UndistortionData.DistortStruct.CreateCvMat(true));
                    if (lastCorners != null)
                    {
                        drawUndistortedCornerFrame(displayMat2, lastCorners, boardSize);
                    }

                    displayMat2.PutText(string.Format("XScale: {0}", this.UndistortionData.XScale), new CvPoint(20, 20), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                    displayMat2.PutText(string.Format("YScale: {0}", this.UndistortionData.YScale), new CvPoint(20, 40), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                    displayMat2.PutText(string.Format("Zoffset: {0}", this.UndistortionData.ZOffset), new CvPoint(20, 60), new CvFont(FontFace.HersheyPlain, 1, 1), new CvScalar(255, 255, 255));
                    putImage(displayMat2, PixelFormats.Bgr24);
                }, "Calibrate Scale Offset", true))
                {
                    displayLabels();
                }
            }
        }
Exemplo n.º 46
0
        /// <summary>
        /// Starts the camera capture loop on a background task.  Frames are read
        /// from the camera, rotated by <c>Theta</c>, decorated according to the
        /// Flag* switches (binarized view / grid / crosshair / frame / NG display)
        /// and published to <c>source</c> via the WPF dispatcher until
        /// <c>StopFlag</c> becomes false.
        /// </summary>
        public void Start()
        {
            IsActive = true;
            StopFlag = true;
            var             im       = new IplImage(); // holds the current camera frame
            WriteableBitmap buff     = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Bgr24, null);
            WriteableBitmap grayBuff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Gray8, null);

            Task.Run(() =>
            {
                using (var cap = Cv.CreateCameraCapture(CameraNumber)) // open the camera
                {
                    Dispatcher dis = App.Current.Dispatcher;

                    // The white balance must be rewritten after the camera starts,
                    // otherwise its automatic mode is never released.
                    Wb = 3000;
                    Thread.Sleep(100);
                    Wb = 3100;
                    Thread.Sleep(100);
                    Wb = State.camProp.Whitebalance;

                    while (StopFlag)             // keep showing frames until stopped
                    {
                        try
                        {
                            Thread.Sleep(100);
                            if (FlagPropChange)
                            {
                                // Push the latest capture properties to the device.
                                cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                                cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
                                cap.SetCaptureProperty(CaptureProperty.Brightness, Brightness);
                                cap.SetCaptureProperty(CaptureProperty.Contrast, Contrast);
                                cap.SetCaptureProperty(CaptureProperty.Hue, Hue);
                                cap.SetCaptureProperty(CaptureProperty.Saturation, Saturation);
                                cap.SetCaptureProperty(CaptureProperty.Sharpness, Sharpness);
                                cap.SetCaptureProperty(CaptureProperty.Gamma, Gamma);
                                cap.SetCaptureProperty(CaptureProperty.Gain, Gain);
                                cap.SetCaptureProperty(CaptureProperty.Exposure, Exposure); // exposure
                                dis.BeginInvoke(new Action(() => { FlagPropChange = false; }));
                            }

                            im = Cv.QueryFrame(cap); // grab a frame (owned by the capture; do not dispose)
                            if (im == null)
                            {
                                continue;
                            }
                            if (IsActive == true)
                            {
                                IsActive = false;
                            }

                            // Tilt correction: rotate the frame by Theta around its center.
                            // The rotation matrix was previously created every frame and
                            // never released; dispose it as soon as the warp is done.
                            CvPoint2D32f center = new CvPoint2D32f(WIDTH / 2, HEIGHT / 2);
                            using (CvMat affineMatrix = Cv.GetRotationMatrix2D(center, Theta, 1.0))
                            {
                                Cv.WarpAffine(im, im, affineMatrix);
                            }

                            // Binarized view
                            if (FlagBin)
                            {
                                var imbuff  = im.Clone();
                                var Binbuff = Binary(imbuff);

                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(Binbuff);
                                    WriteableBitmapConverter.ToWriteableBitmap(Binbuff, grayBuff); // copy the frame into the bitmap
                                    source = grayBuff;
                                    imbuff.Dispose();
                                    Binbuff.Dispose();
                                }));
                                continue;
                            }

                            // Grid overlay: horizontal + vertical lines every 10 px.
                            if (FlagGrid)
                            {
                                foreach (var i in Enumerable.Range(0, HEIGHT / 10))
                                {
                                    var 行  = i * 10;
                                    var p1 = new CvPoint(0, 行);
                                    var p2 = new CvPoint(WIDTH, 行);
                                    im.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                foreach (var j in Enumerable.Range(0, WIDTH / 10))
                                {
                                    var 列  = j * 10;
                                    var p1 = new CvPoint(列, 0);
                                    var p2 = new CvPoint(列, HEIGHT);
                                    im.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the bitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            // Crosshair overlay centered on (CrossX, CrossY).
                            if (FlagCross)
                            {
                                int Rad = 20;
                                var p0  = new CvPoint(CrossX, CrossY);
                                var pR  = new CvPoint(CrossX + Rad, CrossY);
                                var pL  = new CvPoint(CrossX - Rad, CrossY);
                                var pO  = new CvPoint(CrossX, CrossY - Rad);
                                var pU  = new CvPoint(CrossX, CrossY + Rad);
                                im.Line(p0, pR, CvColor.Red, 1, LineType.AntiAlias, 0);
                                im.Line(p0, pL, CvColor.Red, 1, LineType.AntiAlias, 0);
                                im.Line(p0, pO, CvColor.Red, 1, LineType.AntiAlias, 0);
                                im.Line(p0, pU, CvColor.Red, 1, LineType.AntiAlias, 0);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the bitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagFrame)
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(im);
                                    WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the bitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagNgFrame) // the test failed: freeze on the NG image
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeNgFrame(imageForTest);
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForTest, source); // copy the NG frame into the bitmap
                                }));

                                while (FlagNgFrame)
                                {
                                    Thread.Sleep(10); // was an empty busy-wait that burned a CPU core
                                }
                            }

                            if (FlagHsv)
                            {
                                GetHsv(im);
                            }

                            if (FlagTestPic)
                            {
                                // Snapshot the current frame for the inspection routine.
                                imageForTest = im.Clone();
                                FlagTestPic  = false;
                            }

                            // All flags false: show the plain frame.
                            dis.BeginInvoke(new Action(() =>
                            {
                                WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the bitmap
                                source = buff;
                            }));
                        }
                        catch
                        {
                            // The camera occasionally throws; stop the loop and notify.
                            // NOTE(review): "aaaa" is a placeholder message — replace
                            // with a meaningful error text.
                            StopFlag = false;
                            MessageBox.Show("aaaa");
                        }
                    }
                }
            });
        }
Exemplo n.º 47
0
 // Coordinate-system conversion: WPF points -> OpenCV 32-bit float points.
 // NOTE(review): the (int) casts truncate sub-pixel precision even though the
 // target struct stores floats — looks like deliberate pixel snapping, confirm.
 CvPoint2D32f[] changePt(System.Windows.Point[] Pt)
 {
     var result = new CvPoint2D32f[Pt.Length];
     for (var idx = 0; idx < result.Length; idx++)
     {
         result[idx] = new CvPoint2D32f((int)Pt[idx].X, (int)Pt[idx].Y);
     }
     return result;
 }
Exemplo n.º 48
0
        /// <summary>
        /// Starts the camera capture loop on a background task.  Each frame is
        /// undistorted with the calibration files, rotated by <c>Theta</c>,
        /// decorated according to the Flag* switches (labeling / binarized view /
        /// grid / frame / NG display) and published to <c>source</c> via the WPF
        /// dispatcher until <c>canExecute</c> becomes false.
        /// </summary>
        public void Start()
        {
            if (canExecute)
            {
                return;            // Already running: starting twice would run two capture loops and misbehave
            }
            IsActive   = true;
            canExecute = true;
            var             im = new IplImage(); // holds the current camera frame
            WriteableBitmap buff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Bgr24, null);
            WriteableBitmap grayBuff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Gray8, null);
            IplImage        _mapX, _mapY;        // undistortion maps, rebuilt per frame
            var             dst = new IplImage();

            Task.Run(() =>
            {
                try
                {
                    cap = Cv.CreateCameraCapture(CameraNumber); // open the camera

                    cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                    cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);

                    SetWb();

                    var dis = App.Current.Dispatcher;

                    while (canExecute)             // keep showing frames until stopped
                    {
                        try
                        {
                            Thread.Sleep(100);
                            if (FlagPropChange)
                            {
                                // Push the latest capture properties to the device.
                                cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                                cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
                                cap.SetCaptureProperty(CaptureProperty.Brightness, Brightness);
                                cap.SetCaptureProperty(CaptureProperty.Contrast, Contrast);
                                cap.SetCaptureProperty(CaptureProperty.Hue, Hue);
                                cap.SetCaptureProperty(CaptureProperty.Saturation, Saturation);
                                cap.SetCaptureProperty(CaptureProperty.Sharpness, Sharpness);
                                cap.SetCaptureProperty(CaptureProperty.Gamma, Gamma);
                                cap.SetCaptureProperty(CaptureProperty.Gain, Gain);
                                cap.SetCaptureProperty(CaptureProperty.Exposure, Exposure); // exposure
                                //cap.SetCaptureProperty(CaptureProperty.WhiteBalance, White); // not supported by OpenCvSharp 2/3

                                dis.BeginInvoke(new Action(() =>
                                {
                                    try
                                    {
                                        FlagPropChange = false;
                                    }
                                    catch
                                    {
                                        MessageBox.Show("カメラ異常");
                                        canExecute = false;
                                    }
                                }));
                            }

                            im = Cv.QueryFrame(cap); // grab a frame (owned by the capture; do not dispose)
                            if (im == null)
                            {
                                continue;
                            }
                            if (IsActive == true)
                            {
                                IsActive = false;
                            }

                            // NOTE(review): dst is handed to the dispatcher below, so it
                            // cannot be disposed here; a fresh instance per frame still
                            // lives until the GC finalizes it.
                            dst = new IplImage(im.Size, im.Depth, im.NChannels);

                            // Undistort the frame using the calibration files.
                            _mapX = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                            _mapY = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                            Cv.InitUndistortMap(_fileIntrinsic, _fileDistortion, _mapX, _mapY);
                            Cv.Remap(im, dst, _mapX, _mapY);
                            // The maps are only needed for the Remap call above; they were
                            // previously recreated every frame without release, leaking
                            // unmanaged memory.
                            _mapX.Dispose();
                            _mapY.Dispose();

                            // Tilt correction: rotate the frame by Theta around its center.
                            CvPoint2D32f center = new CvPoint2D32f(WIDTH / 2, HEIGHT / 2);
                            using (CvMat affineMatrix = Cv.GetRotationMatrix2D(center, Theta, 1.0))
                            {
                                //Cv.WarpAffine(im, im, affineMatrix);
                                Cv.WarpAffine(dst, dst, affineMatrix);
                            }

                            if (FlagTestPic)
                            {
                                // Snapshot the current frame for the inspection routine.
                                imageForTest = dst.Clone();
                                FlagTestPic  = false;
                            }

                            if (FlagLabeling)
                            {
                                var imageForLabeling = new IplImage(WIDTH, HEIGHT, BitDepth.U8, 3);
                                var imbuff           = dst.Clone();
                                var Binbuff          = Binary(imbuff);
                                imbuff.Dispose(); // only needed as input to Binary; was leaked before
                                blobs = new CvBlobs(Binbuff);
                                Binbuff.Dispose(); // CvBlobs has extracted its labels; was leaked before

                                blobs.RenderBlobs(dst, imageForLabeling);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForLabeling, buff); // copy the frame into the bitmap
                                    source = buff;
                                    imageForLabeling.Dispose();
                                }));

                                while (FlagNgFrame)
                                {
                                    Thread.Sleep(10); // was an empty busy-wait that burned a CPU core
                                }

                                continue;
                            }

                            // Binarized view
                            if (FlagBin)
                            {
                                var imbuff  = dst.Clone();
                                var Binbuff = Binary(imbuff);
                                imbuff.Dispose(); // only needed as input to Binary; was leaked before
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(Binbuff, grayBuff); // copy the frame into the bitmap
                                    source = grayBuff;
                                    Binbuff.Dispose(); // release after display, matching the other capture loop
                                }));
                                continue;
                            }

                            // Grid overlay: horizontal + vertical lines every 10 px.
                            if (FlagGrid)
                            {
                                foreach (var i in Enumerable.Range(0, HEIGHT / 10))
                                {
                                    var 行  = i * 10;
                                    var p1 = new CvPoint(0, 行);
                                    var p2 = new CvPoint(WIDTH, 行);
                                    dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                foreach (var j in Enumerable.Range(0, WIDTH / 10))
                                {
                                    var 列  = j * 10;
                                    var p1 = new CvPoint(列, 0);
                                    var p2 = new CvPoint(列, HEIGHT);
                                    dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff); // copy the frame into the bitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagFrame)
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(dst);
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff); // copy the frame into the bitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagNgFrame) // the test failed: freeze on the NG image
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeNgFrame(imageForTest);
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForTest, source); // copy the NG frame into the bitmap
                                }));

                                while (FlagNgFrame)
                                {
                                    Thread.Sleep(10); // was an empty busy-wait that burned a CPU core
                                }
                            }

                            if (FlagHsv)
                            {
                                GetHsv(dst);
                            }

                            // All flags false: show the plain frame.
                            dis.BeginInvoke(new Action(() =>
                            {
                                try
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff); // copy the frame into the bitmap
                                    source = buff;
                                }
                                catch
                                {
                                    CamState   = false;
                                    canExecute = false;
                                }
                            }));
                        }
                        catch
                        {
                            // Per-frame failure: mark the camera bad and stop the loop.
                            CamState   = false;
                            canExecute = false;
                        }
                    }
                }
                catch
                {
                    CamState = false; // camera could not be opened / fatal capture error
                }
                finally
                {
                    if (cap != null)
                    {
                        cap.Dispose();
                        cap = null;
                    }
                    IsActive = false;
                    Stopped  = true;
                }
            });
        }
Exemplo n.º 49
0
        /// <summary>
        /// Cuts the convex polygon defined by <paramref name="srcPt"/> out of
        /// <paramref name="srcImg"/>: pixels inside the polygon are copied into
        /// the destination buffer, pixels outside are set to 0, and the internal
        /// mask is cleared for the next call.
        /// </summary>
        /// <param name="srcImg">Source image; cloned internally, not modified.</param>
        /// <param name="srcPt">Polygon vertices in image coordinates.</param>
        /// <returns>The shared <c>dstMat</c> buffer holding the masked image.</returns>
        /// <remarks>
        /// NOTE(review): assumes <c>countMat</c> and <c>dstMat</c> were
        /// pre-allocated to imgWidth x imgHeight with <c>channels</c> channels —
        /// confirm against the constructor (not visible here).
        /// </remarks>
        public Mat CutImage(Mat srcImg, CvPoint2D32f[] srcPt)
        {
            try
            {
                this.srcMat = srcImg.Clone();



                // Convert the float corners to integer points for FillConvexPoly.
                this.CvPoints = new List<OpenCvSharp.CPlusPlus.Point>();
                for (int i = 0; i < srcPt.Length; i++)
                {
                    this.CvPoints.Add(new OpenCvSharp.CPlusPlus.Point(srcPt[i].X, srcPt[i].Y));
                }

                // Rasterize the polygon into the mask: inside pixels become 255.
                this.countMat.FillConvexPoly(this.CvPoints, new Scalar(255), LineType.Link8, 0);

                // Walk the mask pixel by pixel; copy masked source pixels to dst,
                // zero the rest, and reset the mask entry as we go.
                unsafe
                {
                    byte* countPtr = this.countMat.DataPointer;
                    byte* srcPtr = this.srcMat.DataPointer;
                    byte* dstPtr = this.dstMat.DataPointer;

                    for (int i = 0; i < (this.imgWidth * this.imgHeight); i++)
                    {
                        if (*(countPtr + i) == 255)
                        {
                            // Inside the polygon: copy all channels of this pixel.
                            for (int j = 0; j < this.channels; j++)
                            {
                                *(dstPtr + i * this.channels + j) = *(srcPtr + i * this.channels + j);

                            }
                        }
                        else
                        {
                            // Outside the polygon: blank the pixel.
                            for (int j = 0; j < this.channels; j++)
                            {
                                *(dstPtr + i * this.channels + j) = 0;

                            }
                        }

                        // Clear the mask so countMat is ready for the next call.
                        *(countPtr + i) = 0;
                    }
                }
            }
            // NOTE(review): swallows every exception (best-effort); on failure
            // dstMat may hold stale data from a previous call.
            catch { }

            
            return this.dstMat;
            

        }
Exemplo n.º 50
0
 /*
  * /// <summary>
  * /// Cv.ExtractSURFを呼び出すためのヘルパーメソッド
  * /// </summary>
  * /// <param name="imageMat">1チャンネル8bit画像</param>
  * /// <param name="param">SURFに渡すパラメータ</param>
  * /// <param name="surfPoints">出力される特徴点の配列</param>
  * /// <param name="descriptorList">出力される特徴量の配列</param>
  * public static void SURF(CvMat imageMat, CvSURFParams param, out CvSURFPoint[] surfPoints, out List<float[]> descriptorList)
  * {
  *  using (CvMemStorage mems = new CvMemStorage())
  *  {
  *      descriptorList = new List<float[]>();
  *      CvSeq<CvSURFPoint> keyPoints;
  *      CvSeq<float> descSeq;
  *      Cv.ExtractSURF(imageMat, null, out keyPoints, out descSeq, mems, param);
  *      using (keyPoints)
  *      using (descSeq)
  *      {
  *          surfPoints = keyPoints.ToArray();
  *          // descriptor読むために回りくどい処理をしている。普通に読むと落ちる
  *          using (CvSeqReader descReader = new CvSeqReader())
  *          {
  *              Cv.StartReadSeq(descSeq, descReader);
  *              int len = (int)(descSeq.ElemSize / sizeof(float));
  *              for (int i = 0; i < surfPoints.Length; i++)
  *              {
  *                  float[] arr = new float[len];
  *                  Marshal.Copy(descReader.Ptr, arr, 0, len);
  *                  descriptorList.Add(arr);
  *                  Cv.NEXT_SEQ_ELEM(descSeq.ElemSize, descReader);
  *              }
  *          }
  *      }
  *  }
  * }
  */
 /// <summary>
 /// Returns the sub-pixel value of a single-channel image.  When the
 /// coordinate has no fractional part the result equals the pixel value at
 /// that coordinate.
 /// </summary>
 /// <param name="mat">Single-channel image.</param>
 /// <param name="point">Sampling coordinate (32-bit float point).</param>
 /// <param name="invalidValue">Value treated as invalid.</param>
 /// <returns>The sampled value, or null when it is invalid.</returns>
 public static double?Get2DSubPixel(CvMat mat, CvPoint2D32f point, double invalidValue)
 {
     // Widen the coordinate to 64-bit and delegate to the CvPoint2D64f overload.
     CvPoint2D64f widened = new CvPoint2D64f(point.X, point.Y);
     return Get2DSubPixel(mat, widened, invalidValue);
 }
Exemplo n.º 51
0
        private void Run()
        {
            CvCapture cap = Cv.CreateCameraCapture(1);
            CvCapture vid = CvCapture.FromFile("trailer.avi");
            IplImage  pic = new IplImage("pic.jpg", LoadMode.AnyColor | LoadMode.AnyDepth);

            Cv.Flip(pic, pic, FlipMode.Y);

            int    b_width   = 5;
            int    b_height  = 4;
            int    b_squares = 20;
            CvSize b_size    = new CvSize(b_width, b_height);

            CvMat warp_matrix = Cv.CreateMat(3, 3, MatrixType.F32C1);

            CvPoint2D32f[] corners = new CvPoint2D32f[b_squares];

            IplImage img;
            IplImage frame;
            IplImage disp;
            IplImage cpy_img;
            IplImage neg_img;

            int corner_count;

            while (_thread != null)
            {
                img = Cv.QueryFrame(cap);

                Cv.Flip(img, img, FlipMode.Y);

                disp    = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                cpy_img = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                neg_img = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);

                IplImage gray  = Cv.CreateImage(Cv.GetSize(img), img.Depth, 1);
                bool     found = Cv.FindChessboardCorners(img, b_size, out corners, out corner_count, ChessboardFlag.AdaptiveThresh | ChessboardFlag.FilterQuads);

                Cv.CvtColor(img, gray, ColorConversion.BgrToGray);

                CvTermCriteria criteria = new CvTermCriteria(CriteriaType.Epsilon, 30, 0.1);
                Cv.FindCornerSubPix(gray, corners, corner_count, new CvSize(11, 11), new CvSize(-1, -1), criteria);

                if (corner_count == b_squares)
                {
                    if (_option == 1)
                    {
                        CvPoint2D32f[] p = new CvPoint2D32f[4];
                        CvPoint2D32f[] q = new CvPoint2D32f[4];

                        IplImage blank = Cv.CreateImage(Cv.GetSize(pic), BitDepth.U8, 3);
                        Cv.Zero(blank);

                        q[0].X = (float)pic.Width * 0;
                        q[0].Y = (float)pic.Height * 0;
                        q[1].X = (float)pic.Width;
                        q[1].Y = (float)pic.Height * 0;

                        q[2].X = (float)pic.Width;
                        q[2].Y = (float)pic.Height;
                        q[3].X = (float)pic.Width * 0;
                        q[3].Y = (float)pic.Height;

                        p[0].X = corners[0].X;
                        p[0].Y = corners[0].Y;
                        p[1].X = corners[4].X;
                        p[1].Y = corners[4].Y;

                        p[2].X = corners[19].X;
                        p[2].Y = corners[19].Y;
                        p[3].X = corners[15].X;
                        p[3].Y = corners[15].Y;

                        Cv.GetPerspectiveTransform(q, p, out warp_matrix);

                        Cv.Zero(neg_img);
                        Cv.Zero(cpy_img);

                        Cv.WarpPerspective(pic, neg_img, warp_matrix);
                        Cv.WarpPerspective(blank, cpy_img, warp_matrix);
                        Cv.Not(cpy_img, cpy_img);

                        Cv.And(cpy_img, img, cpy_img);
                        Cv.Or(cpy_img, neg_img, img);

                        Cv.Flip(img, img, FlipMode.Y);
                        //Cv.ShowImage("video", img);
                        Bitmap bm = BitmapConverter.ToBitmap(img);
                        bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                        pictureBox1.Image = bm;
                    }
                    else if (_option == 2)
                    {
                        CvPoint2D32f[] p = new CvPoint2D32f[4];
                        CvPoint2D32f[] q = new CvPoint2D32f[4];

                        frame = Cv.QueryFrame(vid);

                        Cv.Flip(frame, frame, FlipMode.Y);

                        IplImage blank = Cv.CreateImage(Cv.GetSize(frame), BitDepth.U8, 3);
                        Cv.Zero(blank);
                        Cv.Not(blank, blank);

                        q[0].X = (float)frame.Width * 0;
                        q[0].Y = (float)frame.Height * 0;
                        q[1].X = (float)frame.Width;
                        q[1].Y = (float)frame.Height * 0;

                        q[2].X = (float)frame.Width;
                        q[2].Y = (float)frame.Height;
                        q[3].X = (float)frame.Width * 0;
                        q[3].Y = (float)frame.Height;

                        p[0].X = corners[0].X;
                        p[0].Y = corners[0].Y;
                        p[1].X = corners[4].X;
                        p[1].Y = corners[4].Y;

                        p[2].X = corners[19].X;
                        p[2].Y = corners[19].Y;
                        p[3].X = corners[15].X;
                        p[3].Y = corners[15].Y;

                        Cv.GetPerspectiveTransform(q, p, out warp_matrix);

                        Cv.Zero(neg_img);
                        Cv.Zero(cpy_img);

                        Cv.WarpPerspective(frame, neg_img, warp_matrix);
                        Cv.WarpPerspective(blank, cpy_img, warp_matrix);
                        Cv.Not(cpy_img, cpy_img);

                        Cv.And(cpy_img, img, cpy_img);
                        Cv.Or(cpy_img, neg_img, img);

                        Cv.Flip(img, img, FlipMode.Y);
                        //Cv.ShowImage("video", img);
                        Bitmap bm = BitmapConverter.ToBitmap(img);
                        bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                        pictureBox1.Image = bm;
                    }
                    else
                    {/*
                      * CvPoint[] p = new CvPoint[4];
                      *
                      * p[0].X = (int)corners[0].X;
                      * p[0].Y = (int)corners[0].Y;
                      * p[1].X = (int)corners[4].X;
                      * p[1].Y = (int)corners[4].Y;
                      *
                      * p[2].X = (int)corners[19].X;
                      * p[2].Y = (int)corners[19].Y;
                      * p[3].X = (int)corners[15].X;
                      * p[3].Y = (int)corners[15].Y;
                      *
                      * Cv.Line(img, p[0], p[1], CvColor.Red, 2);
                      * Cv.Line(img, p[1], p[2], CvColor.Green, 2);
                      * Cv.Line(img, p[2], p[3], CvColor.Blue, 2);
                      * Cv.Line(img, p[3], p[0], CvColor.Yellow, 2);
                      */
                        //or
                        Cv.DrawChessboardCorners(img, b_size, corners, found);
                        Cv.Flip(img, img, FlipMode.Y);

                        //Cv.ShowImage("video", img);
                        Bitmap bm = BitmapConverter.ToBitmap(img);
                        bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                        pictureBox1.Image = bm;
                    }
                }
                else
                {
                    Cv.Flip(gray, gray, FlipMode.Y);
                    //Cv.ShowImage("video", gray);
                    Bitmap bm = BitmapConverter.ToBitmap(gray);
                    bm.SetResolution(pictureBox1.Width, pictureBox1.Height);
                    pictureBox1.Image = bm;
                }
            }
        }
Exemplo n.º 52
0
 /// <summary>
 /// Draws a closed quadrilateral connecting the four outermost chessboard
 /// corner points (top-left, top-right, bottom-right, bottom-left) onto the matrix.
 /// </summary>
 /// <param name="mat">Image matrix to draw on.</param>
 /// <param name="pattern_size">Inner-corner grid size of the chessboard (width x height).</param>
 /// <param name="corner">Detected corner points in row-major order, as returned by corner detection.</param>
 /// <param name="color">Polyline color.</param>
 public static void DrawChessboardCornerFrame(CvMat mat, CvSize pattern_size, CvPoint2D32f[] corner, CvScalar color)
 {
     int cols = pattern_size.Width;
     int rows = pattern_size.Height;

     // The four extreme corners of the row-major corner grid.
     CvPoint2D32f[] extremes =
     {
         corner[0],                 // top-left
         corner[cols - 1],          // top-right
         corner[cols * rows - 1],   // bottom-right
         corner[(rows - 1) * cols], // bottom-left
     };

     // Round each float corner to the nearest integer pixel and close the polygon.
     CvPoint[] polygon = Array.ConvertAll(extremes, p => new CvPoint((int)Math.Round(p.X), (int)Math.Round(p.Y)));
     mat.DrawPolyLine(new CvPoint[][] { polygon }, true, color, 1, LineType.AntiAlias);
 }
Exemplo n.º 53
0
        /// <summary>
        /// Parses one day's target-coordinate log file and appends per-target
        /// displacement / rotation / scale records to the matching entries in
        /// <paramref name="DB"/>. A missing file is silently ignored; any parse
        /// error is reported via a message box and aborts the rest of the file.
        /// </summary>
        /// <param name="DB">Report database whose TGS list is matched by target ID and extended.</param>
        /// <param name="path">Path of the log file to read.</param>
        /// <param name="day">Day stamp written onto every appended COORD record.</param>
        private void ReadData(ref ReportDB DB, ref string path, int day)
        {
            if (!File.Exists(path))
                return;
            try
            {
                // using ensures the file handle is released even when parsing throws;
                // the original only called reader.Close() on the success path and
                // leaked the reader whenever an exception was raised mid-parse.
                using (StreamReader reader = new StreamReader(path))
                {
                    string tstr = "";

                    string[] splt = { " : " };
                    char[] splt2 = { '-', '(', ')', ' ' };
                    char[] splt3 = { ',' };

                    while (!reader.EndOfStream)
                    {
                        // Header line: timestamp on the left of " : ", target count on the right.
                        tstr = reader.ReadLine();

                        string[] rst = tstr.Split(splt, StringSplitOptions.RemoveEmptyEntries);

                        string[] rst2 = rst[0].Split(splt2);

                        // NOTE(review): parsing uses the current culture; if this log is
                        // machine-generated, CultureInfo.InvariantCulture would be safer — confirm.
                        int hour = int.Parse(rst2[0]);
                        int minutes = int.Parse(rst2[1]);
                        double seconds = double.Parse(rst2[2]);

                        rst2 = rst[1].Split(splt2);

                        int tnum = int.Parse(rst2[2]);

                        for (int i = 0; i < tnum; i++)
                        {
                            TargetCoord tg = new TargetCoord();

                            // Target line: "<ID> : <corner list>" with corners as "x,y" tuples.
                            tstr = reader.ReadLine();

                            rst = tstr.Split(splt, StringSplitOptions.RemoveEmptyEntries);

                            rst2 = rst[1].Split(splt2, StringSplitOptions.RemoveEmptyEntries);

                            tg.m_ID = rst[0];

                            // Locate the reference target with the same ID; unknown IDs are skipped.
                            int tgnum = -1;

                            for (int x = 0; x < DB.TGS.Count; x++)
                            {
                                TG tmptg = (TG)DB.TGS[x];

                                if (tmptg.m_ID == tg.m_ID)
                                {
                                    tgnum = x;
                                    break;
                                }
                            }

                            if (tgnum < 0)
                                continue;

                            TG srctg = (TG)DB.TGS[tgnum];

                            for (int x = 0; x < rst2.Length; x++)
                            {
                                rst = rst2[x].Split(splt3);
                                tg.Corners[x].X = float.Parse(rst[0]);
                                tg.Corners[x].Y = float.Parse(rst[1]);
                            }

                            // Edge lengths of the detected quadrilateral (corner 0-1 and 1-2).
                            double l1 = Math.Sqrt(Math.Pow(tg.Corners[0].X - tg.Corners[1].X, 2)
                                + Math.Pow(tg.Corners[0].Y - tg.Corners[1].Y, 2));

                            double l2 = Math.Sqrt(Math.Pow(tg.Corners[1].X - tg.Corners[2].X, 2)
                                + Math.Pow(tg.Corners[1].Y - tg.Corners[2].Y, 2));

                            tg.nWidth = l2;
                            tg.nHeight = l1;
                            CalcCenter(ref tg);

                            double sx = 0, sy = 0, ang = 0;

                            //CalcProjectivity(ref m_Images[i - 1].m_Targets[k].m_Corners, ref tg.Corners, ref sx, ref sy, ref ang);

                            // Rotation estimate: recover the in-plane angle from the affine
                            // transform mapping the reference corners onto the detected ones.
                            CvPoint2D32f[] srcPoint = new CvPoint2D32f[3];
                            CvPoint2D32f[] dstPoint = new CvPoint2D32f[3];

                            for (int x = 0; x < 3; x++)
                            {
                                srcPoint[x] = new CvPoint2D32f(srctg.m_Corners[x].X, srctg.m_Corners[x].Y);
                                dstPoint[x] = new CvPoint2D32f(tg.Corners[x].X, tg.Corners[x].Y);
                            }

                            CvMat mapMatrix = Cv.GetAffineTransform(srcPoint, dstPoint);

                            double a11 = Cv.GetReal2D(mapMatrix, 0, 0);
                            double a12 = Cv.GetReal2D(mapMatrix, 0, 1);
                            double a21 = Cv.GetReal2D(mapMatrix, 1, 0);
                            double a22 = Cv.GetReal2D(mapMatrix, 1, 1);

                            // Column norms of the 2x2 linear part give the per-axis scale factors.
                            sx = Math.Sqrt(a11 * a11 + a21 * a21);
                            sy = Math.Sqrt(a12 * a12 + a22 * a22);

                            if (sx == 0)
                                ang = 0;
                            else
                                ang = Math.Abs(Math.Asin(-a21 / sx) * 180 / Math.PI);

                            // Area change: signed doubled-triangle areas of the first three
                            // corners, converted to physical units via the reference width.
                            sx = srcPoint[0].X * (srcPoint[1].Y - srcPoint[2].Y) + srcPoint[1].X * (srcPoint[2].Y - srcPoint[0].Y) + srcPoint[2].X * (srcPoint[0].Y - srcPoint[1].Y);
                            sy = dstPoint[0].X * (dstPoint[1].Y - dstPoint[2].Y) + dstPoint[1].X * (dstPoint[2].Y - dstPoint[0].Y) + dstPoint[2].X * (dstPoint[0].Y - dstPoint[1].Y);

                            sx = sx * Math.Pow(m_TargetWidth / srctg.GetCornerWidth(), 2);
                            sy = sy * Math.Pow(m_TargetWidth / srctg.GetCornerWidth(), 2);

                            double scale = 0;

                            // NOTE(review): sx is 0 for collinear reference corners, which makes
                            // this Infinity/NaN — confirm whether degenerate input can occur here.
                            scale = Math.Abs((sx - sy) / sx * 100);

                            // Center displacement, converted from pixels to physical units.

                            double diffx = Math.Abs(srctg.ix - tg.x);
                            double diffy = Math.Abs(srctg.iy - tg.y);

                            if (srctg.GetCornerWidth() == 0 || srctg.GetCornerHeight() == 0)
                                continue;

                            diffx = m_TargetWidth / srctg.GetCornerWidth() * diffx;
                            diffy = m_TargetHeight / srctg.GetCornerHeight() * diffy;

                            // Append the record to the matching target's history.
                            COORD tdb = new COORD();
                            tdb.dx = diffx;
                            tdb.dy = diffy;
                            tdb.ang = ang;
                            tdb.scale = scale;

                            tdb.day = day;
                            tdb.hour = hour;
                            tdb.min = minutes;
                            tdb.sec = seconds;

                            srctg.Arr.Add(tdb);
                        }
                    }
                }
            }
            catch (Exception ee)
            {
                MessageBox.Show(ee.ToString());
                return;
            }
        }
Exemplo n.º 54
0
 public static extern IntPtr imgproc_getRotationMatrix2D(CvPoint2D32f center, double angle, double scale);