Example #1
        public TrackPointContext(CvSize imageSize, int maxPointsCount)
        {
            gray = new IplImage(imageSize, 8, 1);      // grayscale copy of the frame
            pyramid = new IplImage(imageSize, 8, 1);   // scratch buffer for the Lucas-Kanade pyramid

            points = new StructureSafeMemoryBox<CvPoint2D32f>(maxPointsCount);
            isFading = new bool[maxPointsCount];
        }
Example #2
        private void OnInitialize(IplImage frame)
        {
            lastFrame = new IplImage(frame.Size,
                CxCore.IPL_DEPTH_8U, frame.Channels);
            flipImage = frame.Origin == CxCore.IPL_ORIGIN_BL;   // bottom-left-origin frames need flipping

            PointTracker.InitializeTrackPoints(lastFrame);
        }
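
A minimal sketch of how OnInitialize might sit inside a per-frame handler. The ProcessFrame method and the null check on lastFrame are assumptions, and Flip is assumed to be the static wrapper from Example #8 (its declaring class is not shown in these examples).

        // Hypothetical per-frame callback: initialize lazily, then flip
        // bottom-left-origin frames so later stages see a top-left origin.
        private void ProcessFrame(IplImage frame)
        {
            if (lastFrame == null)
                OnInitialize(frame);

            if (flipImage)
                IplImage.Flip(frame, lastFrame, 0);   // assumes the static Flip wrapper (Example #8) is on IplImage
            else
                frame.CopyTo(lastFrame);
        }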
Example #3
        // Averages the R, G and B values over a width x height block whose top-left corner
        // is (x, y); the caller must keep the block inside the image bounds.
        int[] GetAveragePixelsColor(IplImage image, int x, int y, int width, int height)
        {
            double r = 0, g = 0, b = 0;
            for (int i = 0; i < height; i++)
            {
                for (int j = 0; j < width; j++)
                {
                    CvScalar color = image.GetPixel(x + j, y + i);
                    r += color.R;
                    g += color.G;
                    b += color.B;
                }
            }
            int pixelsCount = height * width;

            return new int[] { (int)(r / pixelsCount), (int)(g / pixelsCount), (int)(b / pixelsCount) };
        }
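
A possible call site for GetAveragePixelsColor, assuming the result should become a System.Drawing.Color; the 16×16 patch size and the SamplePatchColor helper are illustrative only, not taken from the original code.

        // Hypothetical usage: average a 16x16 patch centred on (cx, cy).
        // GetAveragePixelsColor does no bounds checking, so the patch must lie inside the image.
        System.Drawing.Color SamplePatchColor(IplImage image, int cx, int cy)
        {
            int[] rgb = GetAveragePixelsColor(image, cx - 8, cy - 8, 16, 16);
            return System.Drawing.Color.FromArgb(rgb[0], rgb[1], rgb[2]);
        }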
Example #4
        public static void TrackPoints(IplImage image, List<CvPoint> rects)
        {
            if (rects.Count == 0)
            {
                needToInitialize = true;
                return;
            }

            CvSize imageSize = image.Size;
            image.ConvertColor(context1.Gray, Cv.CV_BGR2GRAY);

            // (Re)seed the track points from the detected rectangle corners
            // and refine them to sub-pixel accuracy.
            if (needToInitialize)
            {
                using (IplImage eig = new IplImage(imageSize, 32, 1),
                    temp = new IplImage(imageSize, 32, 1))
                {
                    int k = 0;
                    for (int i = 0; i < rects.Count; i++)
                    {
                        CvPoint p = rects[i];
                        if (!InPoints(p, rects, 0, i - 1))
                            context1.Points[k++] = new CvPoint2D32f(p.x, p.y);
                    }
                    context1.Count = k;

                    Cv.cvFindCornerSubPix(context1.Gray, context1.Points.Pointer, context1.Count, new CvSize(WinSize, WinSize),
                        new CvSize(-1, -1), defaultTermCriteria);
                }
            }
            // Otherwise propagate the previous frame's points with pyramidal Lucas-Kanade optical flow.
            else if (context0.Count > 0)
            {
                Cv.cvCalcOpticalFlowPyrLK(context0.Gray, context1.Gray, context0.Pyramid, context1.Pyramid,
                    context0.Points.Pointer, context1.Points.Pointer,
                    context0.Count, new CvSize(WinSize, WinSize), 3,
                    status.Pointer, IntPtr.Zero, new CvTermCriteria(20, 0.03), flags);
                flags |= Cv.CV_LKFLOW_PYR_A_READY;

                bool[] inTracking = new bool[rects.Count];
                List<CvPoint> pointsInTrack = new List<CvPoint>();
                List<TrackedPoint> trackedPoints = new List<TrackedPoint>();
                int k = 0;
                for (int i = 0; i < context0.Count; i++)
                {
                    if (status[i] == 0) continue;   // optical flow lost this point

                    CvPoint2D32f sp = context1.Points[i];
                    bool valid = false;
                    CvPoint p = sp.ToCvPoint();
                    for (int j = 0; j < rects.Count; j++)
                    {
                        if (CvPoint.Distance2(p, rects[j]) < 25)   // within 5 pixels (squared distance)
                        {
                            inTracking[j] = true;
                            valid = true;
                        }
                    }

                    if (!valid && context0.IsFading[i]) continue;

                    pointsInTrack.Add(p);

                    context1.Points[k] = sp;
                    context1.IsFading[k] = !valid;
                    ++k;
                    TrackedPoint tp = new TrackedPoint();
                    tp.p = sp;
                    CvPoint2D32f sp0 = context0.Points[i];
                    tp.offset = new CvPoint2D32f(sp.x - sp0.x, sp.y - sp0.y);
                    tp.isNewPoint = false;
                    trackedPoints.Add(tp);
                }
                int l = k;
                // Append detected corners that are not already covered by a tracked point.
                for (int i = 0; i < rects.Count; i++)
                {
                    CvPoint p = rects[i];
                    if (!inTracking[i] && !InPoints(p, pointsInTrack, 0, pointsInTrack.Count))
                    {
                        pointsInTrack.Add(rects[i]);
                        CvPoint2D32f sp = new CvPoint2D32f(p.x, p.y);
                        context1.Points[k++] = sp;

                        TrackedPoint tp = new TrackedPoint();
                        tp.p = sp;
                        tp.isNewPoint = true;
                        trackedPoints.Add(tp);
                    }
                }
                if (l < k)
                {
                    // Refine only the newly appended points: offset the buffer by l elements
                    // (sizeof(CvPoint2D32f) bytes each), which also stays correct on 64-bit.
                    IntPtr newPoints = new IntPtr(context1.Points.Pointer.ToInt64() +
                        (long)l * System.Runtime.InteropServices.Marshal.SizeOf(typeof(CvPoint2D32f)));
                    Cv.cvFindCornerSubPix(context1.Gray,
                        newPoints, k - l, new CvSize(WinSize, WinSize),
                        new CvSize(-1, -1), defaultTermCriteria);
                }
                context1.Count = k;
                PointTracker.trackedPoints = trackedPoints.ToArray();
            }
            else
            {
                context1.Count = 0;
                PointTracker.trackedPoints = null;
            }

            if (context1.Count == 0)
            {
                needToInitialize = true;
                return;
            }

            TrackPointContext c = context1;
            context1 = context0;
            context0 = c;
            needToInitialize = false;
        }
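
A sketch of how the pieces fit together per frame, under stated assumptions: PointTracker, InitializeTrackPoints, TrackPoints and FindRectangles come from these examples, while the ProcessVideoFrame driver, the initialized flag and the RectangleFinder class name (the static class hosting FindRectangles is not shown) are placeholders.

        // Hypothetical driver: detect square markers, then feed their corners to the tracker.
        static bool initialized;

        static void ProcessVideoFrame(IplImage frame)
        {
            if (!initialized)
            {
                PointTracker.InitializeTrackPoints(frame);   // allocates both TrackPointContexts (Example #5)
                initialized = true;
            }

            // "RectangleFinder" is a placeholder for whatever class hosts FindRectangles (Example #7).
            List<CvPoint> corners = RectangleFinder.FindRectangles(frame);
            PointTracker.TrackPoints(frame, corners);
        }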
Example #5
        public static void InitializeTrackPoints(IplImage image)
        {
            status = new ByteSafeMemoryBox(MaxPointsCount);

            CvSize imageSize = image.Size;
            context0 = new TrackPointContext(imageSize, MaxPointsCount);
            context1 = new TrackPointContext(imageSize, MaxPointsCount);
        }
Example #6
 public IplImage CloneGray()
 {
     IplImage newImage = new IplImage(Size, Depth, 1);
     ConvertColor(newImage, Cv.CV_BGR2GRAY);
     return newImage;
 }
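
CloneGray allocates a fresh single-channel image, so the caller owns it and should dispose it. A short usage sketch: the frame variable, the edges buffer and the Canny thresholds are assumptions.

 // Hypothetical usage: the clone is caller-owned, so wrap it in using.
 using (IplImage gray = frame.CloneGray())
 using (IplImage edges = new IplImage(gray.Size, 8, 1))
 {
     Cv.cvCanny(gray, edges, 0, 50, 3);
 }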
Example #7
        public static List<CvPoint> FindRectangles(IplImage image)
        {
            storage.Clear();

            const int N = 11;
            const int thresh = 50;

            CvSize imageSize = image.Size;
            CvSize evenImageSize = new CvSize(imageSize.width & ~1, imageSize.height & ~1);
            double MinArea = (evenImageSize.height / 12) * (evenImageSize.height / 12);
            double MaxArea = (evenImageSize.height / 4) * (evenImageSize.height / 4);

            List<CvPoint> squares = new List<CvPoint>();

            using (IplImage timg = new IplImage(imageSize, 8, 3),
                gray = new IplImage(evenImageSize, 8, 1),
                pyr = new IplImage(new CvSize(evenImageSize.width / 2, evenImageSize.height / 2), 8, 3))
            {
                Cv.cvCvtColor(image, timg, Cv.CV_BGR2HSV);

                CxCore.cvSetImageROI(timg, new CvRect(new CvPoint(), evenImageSize));

                Cv.cvPyrDown(timg, pyr, 7);
                Cv.cvPyrUp(pyr, timg, 7);
                IplImage tgray = new IplImage(evenImageSize, 8, 1);   // per-channel plane, disposed below

                // Examine each of the three HSV planes independently.
                for (int c = 0; c < 3; c++)
                {
                    CxCore.cvSetImageCOI(timg, c + 1);
                    timg.CopyTo(tgray);

                    // Level 0: Canny edges (dilated to close gaps); other levels: plain thresholding.
                    for (int l = 0; l < N; l++)
                    {
                        if (l == 0)
                        {
                            Cv.cvCanny(tgray, gray, 0, thresh, 5);
                            Cv.cvDilate(gray, gray, IntPtr.Zero, 1);
                        }
                        else
                        {
                            Cv.cvThreshold(tgray, gray, (l + 1) * 255 / N, 255, 0);
                        }

                        CvSeqNavigator contours;
                        using (IntPtrSafeMemoryBox box = new IntPtrSafeMemoryBox())
                        {
                            Cv.cvFindContours(gray, storage, box.Pointer, CvTypeSizes.CvContourSize,
                                Cv.CV_RETR_LIST, Cv.CV_CHAIN_APPROX_SIMPLE, new CvPoint());
                            contours = new CvSeqNavigator(box.Value);
                        }

                        while (!contours.IsEmpty)
                        {
                            IntPtr resultSeq = Cv.cvApproxPoly(contours.Pointer,
                                CvTypeSizes.CvContourSize, storage, Cv.CV_POLY_APPROX_DP,
                                Cv.cvContourPerimeter(contours) * 0.02, 0);

                            CvSeqCollection<CvPoint> result = new CvSeqCollection<CvPoint>(resultSeq, true);
                            // Accept only convex quadrilaterals within the area bounds
                            // whose sides are roughly equal in length.
                            if (result.Count == 4)
                            {
                                double area = Math.Abs(Cv.cvContourArea(resultSeq, CvSlice.WholeSeq));
                                if (area >= MinArea && area <= MaxArea &&
                                    Cv.cvCheckContourConvexity(resultSeq) != 0)
                                {
                                    CvPoint[] resultArray = result.ToArray();

                                    double dist1 = Math.Sqrt(CvPoint.Distance2(resultArray[0], resultArray[1]));
                                    double dist2 = Math.Sqrt(CvPoint.Distance2(resultArray[1], resultArray[2]));
                                    double dist3 = Math.Sqrt(CvPoint.Distance2(resultArray[2], resultArray[3]));
                                    double dist4 = Math.Sqrt(CvPoint.Distance2(resultArray[3], resultArray[0]));

                                    bool found = Math.Abs((dist1 - dist3) / (dist1 + dist3)) < 0.1 &&
                                        Math.Abs((dist2 - dist4) / (dist2 + dist4)) < 0.1 &&
                                        Math.Abs((dist1 - dist2) / (dist1 + dist2)) < 0.1;

                                    if (found)
                                    {
                                        for (int i = 0; i < 4; i++)
                                        {
                                            squares.Add(result[i]);
                                        }
                                    }
                                }
                            }

                            contours.Next();
                        }
                    }
                }
                tgray.Dispose();
            }
            return squares;
        }
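
FindRectangles returns a flat list with four corner points per accepted quadrilateral, so callers have to regroup them. A minimal sketch; the Quad struct and the GroupIntoQuads helper are not part of the original code.

        // Hypothetical regrouping of the flat corner list: every 4 consecutive points form one rectangle.
        struct Quad { public CvPoint A, B, C, D; }

        static List<Quad> GroupIntoQuads(List<CvPoint> corners)
        {
            List<Quad> quads = new List<Quad>();
            for (int i = 0; i + 3 < corners.Count; i += 4)
                quads.Add(new Quad { A = corners[i], B = corners[i + 1], C = corners[i + 2], D = corners[i + 3] });
            return quads;
        }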
Example #8
 public static void Flip(IplImage source, IplImage destination, int mode)
 {
     CxCore.cvFlip(source.Handle, destination.Handle, mode);
 }
Example #9
 public static void Not(IplImage source, IplImage destination)
 {
     CxCore.cvNot(source.Handle, destination.Handle);
 }
Example #10
 public static void Copy(IplImage source, IplImage destination, IplImage mask)
 {
     CxCore.cvCopy(source.Handle, destination.Handle, mask.Handle);
 }
Example #11
 public static void Copy(IplImage source, IplImage destination)
 {
     CxCore.cvCopy(source.Handle, destination.Handle, IntPtr.Zero);
 }
Example #12
 public void CopyTo(IplImage destination, IplImage mask)
 {
     CxCore.cvCopy(Handle, destination.Handle, mask.Handle);
 }
Example #13
 public void CopyTo(IplImage destination)
 {
     CxCore.cvCopy(Handle, destination.Handle, IntPtr.Zero);
 }
Example #14
 public void ConvertColor(IplImage destination, int code)
 {
     Cv.cvCvtColor(Handle, destination.Handle, code);
 }
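
These last wrappers are thin forwards to the CxCore/Cv P/Invoke layer. A combined usage sketch under the assumption that the static Not and Copy helpers (Examples #9 and #10) are declared on IplImage and that frame is an 8-bit BGR image; mask and inverted are hypothetical buffers.

 // Hypothetical usage: build a grayscale mask, then copy an inverted frame through it.
 using (IplImage mask = new IplImage(frame.Size, 8, 1))
 using (IplImage inverted = new IplImage(frame.Size, 8, frame.Channels))
 {
     frame.ConvertColor(mask, Cv.CV_BGR2GRAY);   // Example #14
     IplImage.Not(frame, inverted);              // Example #9 (assumed to be declared on IplImage)
     IplImage.Copy(inverted, frame, mask);       // Example #10: copy only where the mask is non-zero
 }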