Example #1
        /// <summary>
        /// Draws the 2D contour of the marker onto the given image.
        /// </summary>
        /// <param name="image">Image to draw on.</param>
        /// <param name="color">Line color.</param>
        public void draw2dContour(Mat image, Scalar color)
        {
            List <Point> points2dList = points2d.toList();

            for (int i = 0; i < points2dList.Count; i++)
            {
                Imgproc.line(image, points2dList[i], points2dList[(i + 1) % points2dList.Count], color, 2, Imgproc.LINE_AA, 0);
            }
        }
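For context, a minimal sketch of how this method might be driven. It assumes the enclosing marker class exposes a MatOfPoint2f field named points2d that a projection step has already filled; the frame size and corner values below are purely illustrative.

            // Hypothetical usage: outline a marker on a blank RGBA frame.
            Mat frame = new Mat(480, 640, CvType.CV_8UC4, new Scalar(0, 0, 0, 255));
            marker.points2d.fromArray(new Point(100, 100), new Point(200, 100),
                                      new Point(200, 200), new Point(100, 200));
            marker.draw2dContour(frame, new Scalar(255, 0, 0, 255));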
Example #2
        /// <summary>
        /// Draws the 2D contour of the marker onto the given image.
        /// </summary>
        /// <param name="image">Image to draw on.</param>
        /// <param name="color">Line color.</param>
        public void draw2dContour(Mat image, Scalar color)
        {
//      Debug.Log ("points2d " + points2d.dump());

            List <Point> points2dList = points2d.toList();

            for (int i = 0; i < points2dList.Count; i++)
            {
                Imgproc.line(image, points2dList [i], points2dList [(i + 1) % points2dList.Count], color, 2, Imgproc.LINE_AA);
            }
        }
Example #3
    private void drawPoints()
    {
        List <Point> cornersPrev = mMOP2fptsPrev.toList();
        List <Point> cornersThis = mMOP2fptsThis.toList();
        List <byte>  byteStatus  = mMOBStatus.toList();

        // Draw every point whose flow was successfully found.
        for (int x = 0; x < byteStatus.Count; x++)
        {
            if (byteStatus[x] == 1)
            {
                Point pt  = cornersThis[x];
                Point pt2 = cornersPrev[x];
                // Note: in OpenCV 3+ these drawing helpers live on Imgproc, not Core.
                Core.circle(rgbaMat, pt, 5, colorRed, lineThickness - 1);
                Core.line(rgbaMat, pt, pt2, colorRed, lineThickness);
            }
        }
    }
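drawPoints only renders state computed elsewhere. A hedged sketch of the per-frame step that would populate those fields, reusing the field names above; prevGray and thisGray are assumed grayscale Mats of two consecutive frames.

        // Assumed per-frame pipeline feeding drawPoints():
        Imgproc.goodFeaturesToTrack(prevGray, MOPcorners, 100, 0.05, 20); // pick trackable corners
        mMOP2fptsPrev.fromArray(MOPcorners.toArray());
        Video.calcOpticalFlowPyrLK(prevGray, thisGray, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);
        drawPoints(); // draws a motion vector for every successfully tracked point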
Example #4
    private void refresh()
    {
        var defaultPoints = defaultCornerPoints.Select(e => e.point).ToArray();
        var destPoints    = CornerPoints.Select(e => e.point).ToArray();

        using (var defaultCornerMat = new MatOfPoint2f(defaultPoints))
        using (var destCornerMat = new MatOfPoint2f(destPoints))
        using (var defaultMat = new MatOfPoint2f(defaultVertices.Select(e => new Point(e.x, e.y)).ToArray()))
        using (var destMat = new MatOfPoint2f(meshFilter.mesh.vertices.Select(e => new Point(e.x, e.y)).ToArray()))
        {
            // Homography from the default corner layout to the current corners,
            // applied to every mesh vertex.
            var h = Calib3d.findHomography(defaultCornerMat, destCornerMat);
            OpenCVForUnity.CoreModule.Core.perspectiveTransform(defaultMat, destMat, h);
            var vertices = destMat.toList().Select(e => new Vector3((float)e.x, (float)e.y, 0f)).ToList();
            meshFilter.mesh.SetVertices(vertices);
        }
    }
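The heart of refresh is the findHomography/perspectiveTransform pair. A self-contained sketch of that pairing with made-up point data: it maps a unit square onto a doubled square, so the transformed midpoint should come out near (1, 1).

    using (var src = new MatOfPoint2f(new Point(0, 0), new Point(1, 0), new Point(1, 1), new Point(0, 1)))
    using (var dst = new MatOfPoint2f(new Point(0, 0), new Point(2, 0), new Point(2, 2), new Point(0, 2)))
    using (var pts = new MatOfPoint2f(new Point(0.5, 0.5)))
    using (var outPts = new MatOfPoint2f())
    using (var h = Calib3d.findHomography(src, dst))
    {
        OpenCVForUnity.CoreModule.Core.perspectiveTransform(pts, outPts, h);
        Debug.Log(outPts.dump()); // expect roughly [1, 1]
    }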
Example #5
    public void GenerateSVG(string path)
    {
        StringWriter svgWriter = new StringWriter();
        Vector2      size      = new Vector2(instance.imageProcessingResults[0].cols(), instance.imageProcessingResults[0].rows());

        svgWriter.WriteLine("<svg width=\"" + size.x + "\" height=\"" + size.y + "\" xmlns=\"http://www.w3.org/2000/svg\">");

        List <MatOfPoint> contours = new List <MatOfPoint>();
        MatOfPoint2f      approx   = new MatOfPoint2f();

        Mat srcHierarchy = new Mat();
        int colorIndex   = 0;

        // The background entity is loop-invariant, so look it up once.
        var background = yarnEntities.First(y => y.yarnPanel.isBackground && y.yarnPanel.yarnZone == YarnPanel.YarnZone.Weft);

        foreach (var layer in instance.imageProcessingResults)
        {
            if (colorIndex != background.clusterId) // skip the background layer
            {
            {
                Imgproc.findContours(layer, contours, srcHierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
                svgWriter.WriteLine("<g>");
                for (int i = 0; i < contours.Count; i++)
                {
                    MatOfPoint2f cnt     = new MatOfPoint2f(contours[i].toArray());
                    double       epsilon = 0.01 * Imgproc.arcLength(cnt, true);
                    // approxPolyDP is left disabled, so the raw contour is emitted;
                    // see the simplification sketch after this example.
                    //Imgproc.approxPolyDP(cnt, approx, epsilon, true);
                    approx = cnt;
                    List <Point> contourList = approx.toList();
                    svgWriter.Write("<path fill=\"none\" stroke=\"#" + ColorUtility.ToHtmlStringRGB(instance.clusterList[colorIndex]) + "\" d=\"M");
                    for (int j = 0; j < contourList.Count; j++)
                    {
                        svgWriter.Write("" + contourList[j].x + " " + (size.y - contourList[j].y) + " L");
                    }
                    svgWriter.GetStringBuilder().Length -= 1; // trim the trailing 'L'
                    svgWriter.WriteLine("Z\" />");
                }
                svgWriter.WriteLine("</g>");
            }

            colorIndex++;
        }
        svgWriter.WriteLine("</svg>");
        File.WriteAllText(path, svgWriter.ToString());
    }
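The approxPolyDP call inside GenerateSVG is commented out, so every contour point is written to the SVG verbatim. A hedged sketch of the simplification step as it would look re-enabled, using the same epsilon rule the method already computes:

    // Douglas-Peucker simplification at 1% of the contour's arc length.
    MatOfPoint2f cnt = new MatOfPoint2f(contours[i].toArray());
    double epsilon = 0.01 * Imgproc.arcLength(cnt, true);
    MatOfPoint2f approx = new MatOfPoint2f();
    Imgproc.approxPolyDP(cnt, approx, epsilon, true); // true = closed contour
    List<Point> contourList = approx.toList(); // usually far fewer points than cnt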
Example #6
        // Sums the Euclidean distances between corresponding points of m1 and m2;
        // returns -1 if the two point sets differ in size.
        float norml2(MatOfPoint2f m1, MatOfPoint2f m2)
        {
            float norm = 0f;

            // Type conversions
            List <Point> m1list = m1.toList();
            List <Point> m2list = m2.toList();

            // Array sizes
            int dimM1 = m1list.Count;
            int dimM2 = m2list.Count;

            // Ensure that the arrays are the same size
            if (dimM1 != dimM2)
            {
                return -1f;
            }

            for (int i = 0; i < dimM1; i++)
            {
                norm += Mathf.Sqrt(Mathf.Pow((float)(m2list[i].x - m1list[i].x), 2) + Mathf.Pow((float)(m2list[i].y - m1list[i].y), 2));
            }
            return norm;
        }
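A usage sketch with invented values: the two point sets differ by a 3-4-5 displacement on the first point only, so the returned sum is 5.

        var a = new MatOfPoint2f(new Point(0, 0), new Point(10, 10));
        var b = new MatOfPoint2f(new Point(3, 4), new Point(10, 10));
        Debug.Log("total displacement: " + norml2(a, b)); // 5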
Example #7
        //public RawImage document;

        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat mainMat = webCamTextureToMatHelper.GetMat();


                if (!selectTarget) // find the paper by contours
                {
                    grayMat = new Mat();

                    // keep a copy of the current frame (still RGBA at this point,
                    // despite the grayMat name)
                    mainMat.copyTo(grayMat);

                    mainMat = findPaper(mainMat);

                    // display matrix on the screen
                    Utils.fastMatToTexture2D(mainMat, texture);
                }
                else
                { // track the target using optical flow
                    // allocate the current grayscale mat (8UC1, filled by cvtColor below)
                    currentGrayMat = new Mat(mainMat.rows(), mainMat.cols(), CvType.CV_8UC1);
                    Imgproc.cvtColor(mainMat, currentGrayMat, Imgproc.COLOR_RGBA2GRAY);


                    if (initOpticalFlow) // one-time setup for optical flow
                    {
                        // seed 40 tracking points: each paper corner plus a small
                        // cloud of offsets around it
                        // (paperCornerMatOfPoint holds the four corners of the paper)
                        List<Point> cornerList = paperCornerMatOfPoint.toList();
                        Point[] points = new Point[40];
                        for (int i = 0; i < 4; i++)
                        {
                            double cx = cornerList[i].x, cy = cornerList[i].y;
                            points[i * 10]     = new Point(cx, cy);
                            points[i * 10 + 1] = new Point(cx + 1, cy);
                            points[i * 10 + 2] = new Point(cx, cy + 1);
                            points[i * 10 + 3] = new Point(cx + 1, cy + 1);
                            points[i * 10 + 4] = new Point(cx, cy - 1);
                            points[i * 10 + 5] = new Point(cx - 1, cy);
                            points[i * 10 + 6] = new Point(cx - 2, cy - 1);
                            points[i * 10 + 7] = new Point(cx, cy - 2);
                            points[i * 10 + 8] = new Point(cx - 2, cy - 2);
                            points[i * 10 + 9] = new Point(cx + 2, cy + 2);
                        }

                        // optionally snap the seeds to real corners instead (Harris-style):
                        //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40, qualityLevel, minDistance, none, blockSize, false, 0.04);
                        //Imgproc.goodFeaturesToTrack(currentGrayMat, corners, 40,0.05,20);

                        corners.fromArray(points);

                        prevFeatures.fromList(corners.toList());
                        currentFeatures.fromList(corners.toList());
                        prevGrayMat = currentGrayMat.clone();

                        // don't run this init branch again
                        initOpticalFlow = false;


                        // assign a random color per point (used only by the
                        // commented-out debug drawing further down)
                        for (int i = 0; i < maxCorners; i++)
                        {
                            color.Add(new Scalar((int)(Random.value * 255), (int)(Random.value * 255),
                                                 (int)(Random.value * 255), 255));
                        }
                    }
                    else
                    {
                        // to freeze the points, copy prev over current instead:
                        //currentFeatures.fromArray(prevFeatures.toArray());

                        // let the points move: last frame's results become the new previous set
                        prevFeatures.fromArray(currentFeatures.toArray());

                        // optical flow updates the values in currentFeatures
                        Video.calcOpticalFlowPyrLK(prevGrayMat, currentGrayMat, prevFeatures, currentFeatures, mMOBStatus, err);
                        //Debug.Log(st.rows());

                        // change to points list
                        List <Point> prevList  = prevFeatures.toList(),
                                     nextList  = currentFeatures.toList();
                        List <byte> byteStatus = mMOBStatus.toList();


                        // draw every point whose flow was found
                        for (int x = 0; x < byteStatus.Count; x++)
                        {
                            if (byteStatus[x] == 1)
                            {
                                Point pt  = nextList[x];
                                Point pt2 = prevList[x];

                                Imgproc.circle(mainMat, pt, 10, new Scalar(0, 0, 255), -1);

                                Imgproc.line(mainMat, pt, pt2, new Scalar(0, 0, 255));
                            }
                        }

                        // draw the data
                        //for (int i = 0; i < prevList.Count; i++)
                        //{
                        //    //Imgproc.circle(frame, prevList[i], 5, color[10]);
                        //    Imgproc.circle(mainMat, nextList[i], 10, new Scalar(0, 0, 255), -1);

                        //    Imgproc.line(mainMat, prevList[i], nextList[i], color[20]);
                        //}


                        List <List <Point> > cornersFeatures = new List <List <Point> >(40);
                        cornersFeatures.Add(new List <Point>(10));

                        // group neighbouring tracked points into corner clusters
                        int  tmp  = 0;
                        bool last = true;
                        for (int i = 0; i < nextList.Count - 1; i++)
                        {
                            if (Mathf.Abs((float)(nextList[i].x - nextList[i + 1].x)) < 10 && Mathf.Abs((float)(nextList[i].y - nextList[i + 1].y)) < 10)
                            {
                                if (last == true)
                                {
                                    cornersFeatures[tmp].Add(nextList[i]);
                                }
                                else
                                {
                                    cornersFeatures.Add(new List <Point>(10));
                                    tmp = tmp + 1;
                                    cornersFeatures[tmp].Add(nextList[i]);
                                }
                                last = true;
                            }
                            else
                            {
                                last = false;
                            }
                        }

                        // count the clusters large enough to be corners
                        // (iterate backwards so RemoveAt cannot skip the next element)
                        int manyCornersFeatures = 0;
                        for (int i = cornersFeatures.Count - 1; i >= 0; i--)
                        {
                            Debug.Log(cornersFeatures[i].Count);
                            if (cornersFeatures[i].Count < 5)
                            {
                                cornersFeatures.RemoveAt(i);
                            }
                            else
                            {
                                manyCornersFeatures++;
                            }
                        }

                        //Debug.Log("Length" + manyCornersFeatures);

                        // if exactly four corner clusters remain, warp the virtual
                        // document into the frame with a perspective transform
                        if (manyCornersFeatures == 4)
                        {
                            Mat documentMat = new Mat(document.height, document.width, CvType.CV_8UC3);
                            Utils.texture2DToMat(document, documentMat);

                            List <Point> srcPoints = new List <Point>();
                            srcPoints.Add(new Point(0, 0));
                            srcPoints.Add(new Point(documentMat.cols(), 0));
                            srcPoints.Add(new Point(documentMat.cols(), documentMat.rows()));
                            srcPoints.Add(new Point(0, documentMat.rows()));


                            Mat srcPointsMat = Converters.vector_Point_to_Mat(srcPoints, CvType.CV_32F);


                            List <Point> dstPoints = new List <Point>()
                            {
                                cornersFeatures[0][0], cornersFeatures[1][0], cornersFeatures[2][0], cornersFeatures[3][0]
                            };
                            Mat dstPointsMat = Converters.vector_Point_to_Mat(dstPoints, CvType.CV_32F);


                            //Make perspective transform
                            Mat m         = Imgproc.getPerspectiveTransform(srcPointsMat, dstPointsMat);
                            Mat warpedMat = new Mat(new Size(), documentMat.type());
                            Debug.Log((cornersFeatures[1][0].x - cornersFeatures[0][0].x) + " " + (cornersFeatures[2][0].y - cornersFeatures[1][0].y));
                            Imgproc.warpPerspective(documentMat, warpedMat, m, mainMat.size(), Imgproc.INTER_LINEAR);
                            // ensure 8-bit, 3-channel data for compositing
                            warpedMat.convertTo(warpedMat, CvType.CV_8UC3);
                            // same size as frame
                            // compositing buffer, same size as the camera frame
                            Mat dst = new Mat(mainMat.size(), CvType.CV_8UC3);

                            Imgproc.cvtColor(mainMat, dst, Imgproc.COLOR_RGBA2RGB);

                            //dst.setTo(new Scalar(0, 255, 0));
                            //currentGrayMat.copyTo(dst);
                            //dst.convertTo(dst, CvType.CV_8UC3);


                            //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                            Mat img1 = new Mat();
                            Mat mask = new Mat(mainMat.size(), CvType.CV_8UC1, new Scalar(0));
                            Imgproc.cvtColor(warpedMat, img1, Imgproc.COLOR_RGB2GRAY);
                            Imgproc.Canny(img1, img1, 100, 200);
                            List <MatOfPoint> doc_contours = new List <MatOfPoint>();
                            Imgproc.findContours(img1, doc_contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
                            Imgproc.drawContours(mask, doc_contours, -1, new Scalar(255), Core.FILLED);

                            warpedMat.copyTo(dst, mask);

                            dst.convertTo(dst, CvType.CV_8UC3);

                            Debug.Log("dst" + dst.size());


                            Imgproc.cvtColor(dst, mainMat, Imgproc.COLOR_RGB2RGBA);


                            // display the composited result on the target RawImage
                            Texture2D finalTexture = new Texture2D(dst.width(), dst.height(), TextureFormat.RGB24, false);
                            Utils.matToTexture2D(dst, finalTexture);

                            targetRawImage.texture = finalTexture;
                        }


                        // current frame to old frame
                        prevGrayMat = currentGrayMat.clone();



                        //Imgproc.cvtColor(currentGrayMat, frame, Imgproc.COLOR_GRAY2RGBA);

                        // display matrix on the screen
                        Utils.fastMatToTexture2D(mainMat, texture);
                    }
                }
            }
        }
Example #8
        /// <summary>
        /// Processes points by filter.
        /// </summary>
        /// <param name="img">Image mat.</param>
        /// <param name="srcPoints">Input points.</param>
        /// <param name="dstPoints">Output points.</param>
        /// <param name="drawDebugPoints">if true, draws debug points.</param>
        /// <returns>Output points.</returns>
        public override List <Vector2> Process(Mat img, List <Vector2> srcPoints, List <Vector2> dstPoints = null, bool drawDebugPoints = false)
        {
            if (srcPoints != null && srcPoints.Count != numberOfElements)
            {
                throw new ArgumentException("The number of elements is different.");
            }

            if (srcPoints == null)
            {
                // nothing to track; hand back whatever the caller supplied
                return dstPoints == null ? srcPoints : dstPoints;
            }

            if (!flag)
            {
                if (img.channels() == 4)
                {
                    Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGBA2GRAY);
                }
                else if (img.channels() == 3)
                {
                    Imgproc.cvtColor(img, prevgray, Imgproc.COLOR_RGB2GRAY);
                }
                else
                {
                    if (prevgray.total() == 0)
                    {
                        prevgray = img.clone();
                    }
                    else
                    {
                        img.copyTo(prevgray);
                    }
                }

                for (int i = 0; i < numberOfElements; i++)
                {
                    prevTrackPts[i] = new Point(srcPoints[i].x, srcPoints[i].y);
                }

                flag = true;
            }

            // srcPoints is guaranteed non-null past the early return above
            if (srcPoints != null)
            {
                if (dstPoints == null)
                {
                    dstPoints = new List <Vector2>();
                }
                if (dstPoints != null && dstPoints.Count != numberOfElements)
                {
                    dstPoints.Clear();
                    for (int i = 0; i < numberOfElements; i++)
                    {
                        dstPoints.Add(new Vector2());
                    }
                }

                if (img.channels() == 4)
                {
                    Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGBA2GRAY);
                }
                else if (img.channels() == 3)
                {
                    Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGB2GRAY);
                }
                else
                {
                    if (gray.total() == 0)
                    {
                        gray = img.clone();
                    }
                    else
                    {
                        img.copyTo(gray);
                    }
                }

                if (prevgray.total() > 0)
                {
                    mOP2fPrevTrackPts.fromList(prevTrackPts);
                    mOP2fNextTrackPts.fromList(nextTrackPts);
                    Video.calcOpticalFlowPyrLK(prevgray, gray, mOP2fPrevTrackPts, mOP2fNextTrackPts, status, err);
                    prevTrackPts = mOP2fPrevTrackPts.toList();
                    nextTrackPts = mOP2fNextTrackPts.toList();

                    // calc diffDlib: scale the threshold by the tracked region's area
                    prevTrackPtsMat.fromList(prevTrackPts);
                    OpenCVForUnity.CoreModule.Rect rect = Imgproc.boundingRect(prevTrackPtsMat);
                    double diffDlib = this.diffDlib * rect.area() / 40000.0 * diffCheckSensitivity;

                    // if the face is moving so fast, use dlib to detect the face
                    double diff = calDistanceDiff(prevTrackPts, nextTrackPts);
                    if (drawDebugPoints)
                    {
                        Debug.Log("variance:" + diff);
                    }
                    if (diff > diffDlib)
                    {
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            nextTrackPts[i].x = srcPoints[i].x;
                            nextTrackPts[i].y = srcPoints[i].y;

                            dstPoints[i] = srcPoints[i];
                        }

                        if (drawDebugPoints)
                        {
                            Debug.Log("DLIB");
                            for (int i = 0; i < numberOfElements; i++)
                            {
                                Imgproc.circle(img, new Point(srcPoints[i].x, srcPoints[i].y), 2, new Scalar(255, 0, 0, 255), -1);
                            }
                        }
                    }
                    else
                    {
                        // In this case, use Optical Flow
                        for (int i = 0; i < numberOfElements; i++)
                        {
                            dstPoints[i] = new Vector2((float)nextTrackPts[i].x, (float)nextTrackPts[i].y);
                        }

                        if (drawDebugPoints)
                        {
                            Debug.Log("Optical Flow");
                            for (int i = 0; i < numberOfElements; i++)
                            {
                                Imgproc.circle(img, nextTrackPts[i], 2, new Scalar(0, 0, 255, 255), -1);
                            }
                        }
                    }
                }
                Swap(ref prevTrackPts, ref nextTrackPts);
                Swap(ref prevgray, ref gray);
            }
            return dstPoints;
        }
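Swap and calDistanceDiff are referenced above but not shown. A plausible sketch of both, under the assumption that calDistanceDiff returns the mean squared point-to-point displacement; the metric in the real filter may differ in detail.

        // Exchange two references so the prev/next buffers alternate without copying.
        static void Swap<T>(ref T a, ref T b)
        {
            T tmp = a;
            a = b;
            b = tmp;
        }

        // Assumed metric: mean squared displacement between matched points.
        double calDistanceDiff(List<Point> prev, List<Point> next)
        {
            if (prev.Count != next.Count || prev.Count == 0)
                return 0.0;

            double sum = 0.0;
            for (int i = 0; i < prev.Count; i++)
            {
                double dx = next[i].x - prev[i].x;
                double dy = next[i].y - prev[i].y;
                sum += dx * dx + dy * dy;
            }
            return sum / prev.Count;
        }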
Example #9
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();

                matOpFlowThis.Dispose();
                matOpFlowPrev.Dispose();
                MOPcorners.Dispose();
                mMOP2fptsThis.Dispose();
                mMOP2fptsPrev.Dispose();
                mMOP2fptsSafe.Dispose();
                mMOBStatus.Dispose();
                mMOFerr.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);


                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                // On iOS, webCamTexture.width/height report 16 until the first real frame
                // has arrived (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IPHONE && !UNITY_EDITOR
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
#else
                if (webCamTexture.didUpdateThisFrame)
                {
#endif

                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                    matOpFlowThis = new Mat();
                    matOpFlowPrev = new Mat();
                    MOPcorners    = new MatOfPoint();
                    mMOP2fptsThis = new MatOfPoint2f();
                    mMOP2fptsPrev = new MatOfPoint2f();
                    mMOP2fptsSafe = new MatOfPoint2f();
                    mMOBStatus    = new MatOfByte();
                    mMOFerr       = new MatOfFloat();

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
//                  gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


//                  bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//                  float scaleX = 1;
//                  float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//                  if (webCamTexture.videoRotationAngle == 270)
//                      scaleY = -1.0f;
//                  gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                    Camera.main.orthographicSize = webCamTexture.width / 2;
#else
                    Camera.main.orthographicSize = webCamTexture.height / 2;
#endif

                    initDone = true;

                    break;
                }
                else
                {
                    yield return null; // wait a frame and check again
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

#if UNITY_IPHONE && !UNITY_EDITOR
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                            // no flip needed
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 90)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 90)
                        {
                            // no flip needed
                        }
                        else if (webCamTexture.videoRotationAngle == 270)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // draw every point whose flow was found
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.width / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("OpenCVForUnitySample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
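As noted in OnGUI above, Application.LoadLevel has long been deprecated; on current Unity versions the equivalent is SceneManager. A sketch of the drop-in replacement, with the scene name taken from the button handler above:

        // at the top of the file:
        // using UnityEngine.SceneManagement;

        if (GUILayout.Button("back"))
        {
            SceneManager.LoadScene("OpenCVForUnitySample");
        }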
Example #10
        /// <summary>
        /// Recognizes the markers.
        /// </summary>
        /// <param name="grayscale">Grayscale.</param>
        /// <param name="detectedMarkers">Detected markers.</param>
        void recognizeMarkers(Mat grayscale, List <Marker> detectedMarkers)
        {
            List <Marker> goodMarkers = new List <Marker> ();

            // Identify the markers
            for (int i = 0; i < detectedMarkers.Count; i++)
            {
                Marker marker = detectedMarkers [i];


                // Find the perspective transformation that brings current marker to rectangular form
                Mat markerTransform = Imgproc.getPerspectiveTransform(new MatOfPoint2f(marker.points.toArray()), m_markerCorners2d);


                // Transform image to get a canonical marker image
                Imgproc.warpPerspective(grayscale, canonicalMarkerImage, markerTransform, markerSize);

                for (int p = 0; p < m_markerDesigns.Count; p++)
                {
                    MatOfInt nRotations = new MatOfInt(0);
                    int      id         = Marker.getMarkerId(canonicalMarkerImage, nRotations, m_markerDesigns [p]);
                    if (id != -1)
                    {
                        marker.id = id;
//                              Debug.Log ("id " + id);

                        //sort the points so that they are always in the same order no matter the camera orientation
                        List <Point> MarkerPointsList = marker.points.toList();

                        //              std::rotate(marker.points.begin(), marker.points.begin() + 4 - nRotations, marker.points.end());
                        MarkerPointsList = MarkerPointsList.Skip(4 - nRotations.toArray() [0]).Concat(MarkerPointsList.Take(4 - nRotations.toArray() [0])).ToList();

                        marker.points.fromList(MarkerPointsList);

                        goodMarkers.Add(marker);
                    }
                    nRotations.Dispose();
                }
            }

//              Debug.Log ("goodMarkers " + goodMarkers.Count);

            // Refine marker corners using sub pixel accuracy
            if (goodMarkers.Count > 0)
            {
                List <Point> preciseCornersPoint = new List <Point> (4 * goodMarkers.Count);
                for (int i = 0; i < preciseCornersPoint.Capacity; i++)
                {
                    preciseCornersPoint.Add(new Point(0, 0));
                }



                for (int i = 0; i < goodMarkers.Count; i++)
                {
                    Marker marker = goodMarkers [i];

                    List <Point> markerPointsList = marker.points.toList();

                    for (int c = 0; c < 4; c++)
                    {
                        preciseCornersPoint [i * 4 + c] = markerPointsList [c];
                    }
                }

                MatOfPoint2f preciseCorners = new MatOfPoint2f(preciseCornersPoint.ToArray());

                TermCriteria termCriteria = new TermCriteria(TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
                Imgproc.cornerSubPix(grayscale, preciseCorners, new Size(5, 5), new Size(-1, -1), termCriteria);

                preciseCornersPoint = preciseCorners.toList();

                // Copy refined corner positions back to the markers
                for (int i = 0; i < goodMarkers.Count; i++)
                {
                    Marker marker = goodMarkers [i];

                    List <Point> markerPointsList = marker.points.toList();

                    for (int c = 0; c < 4; c++)
                    {
                        markerPointsList [c] = preciseCornersPoint [i * 4 + c];
                    }
                    // toList() returns a detached copy, so the refined points must be
                    // written back into the marker's underlying Mat
                    marker.points.fromList(markerPointsList);
                }
                preciseCorners.Dispose();
            }

            detectedMarkers.Clear();
            detectedMarkers.AddRange(goodMarkers);
        }
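The Skip/Concat/Take chain above is the C# rendering of the std::rotate call quoted in the comment. A small helper (a sketch, not part of the original sample) makes the intent clearer and avoids calling nRotations.toArray() twice:

        // Rotate a list left by 'shift' positions, like std::rotate.
        static List<Point> RotateLeft(List<Point> pts, int shift)
        {
            shift = ((shift % pts.Count) + pts.Count) % pts.Count; // normalize into range
            return pts.Skip(shift).Concat(pts.Take(shift)).ToList();
        }

        // equivalent to the inline expression above:
        // markerPointsList = RotateLeft(markerPointsList, 4 - nRotations.toArray()[0]);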
Example #11
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 *  Parameters:
                 *      prevImg first 8-bit input image
                 *      nextImg second input image
                 *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // draw every point whose flow was found
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }

//              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
Example #12
    // Update is called once per frame
    void Update()
    {
        if (!IsStarted)
        {
            return;
        }
        Mat grayMat = webCamTextureToMat.GetMat();

        Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        //Debug.Log("mMOP2fptsPrev.rows() : " + mMOP2fptsPrev.rows().ToString());

        //Debug.Log("rgbaMat.rows() : " + rgbaMat.rows().ToString());
        //Debug.Log("matOpFlowThis.rows() : " + matOpFlowThis.rows().ToString());

        if (mMOP2fptsPrev.rows() == 0)
        {
            // first time through the loop so we need prev and this mats
            // plus prev points
            // get this mat
            grayMat.copyTo(matOpFlowThis);

            // copy that to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.1, 100);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
            //Debug.Log("opencv optical flow --- 1 ");
        }
        else
        {
            // we've been through before so
            // this mat is valid. Copy it to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            grayMat.copyTo(matOpFlowThis);

            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.1, 100);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat
            // (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through

            mMOP2fptsThis.copyTo(mMOP2fptsSafe);

            //Debug.Log("opencv optical flow --- 2 ");
        }


        /*
         *  Parameters:
         *      prevImg first 8-bit input image
         *      nextImg second input image
         *      prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
         *      nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
         *      status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
         *      err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
         */
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (!mMOBStatus.empty())
        {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte>  byteStatus  = mMOBStatus.toList();

            // draw each tracked point and count how many moved more than 20 px
            int num_distance = 0;
            for (int x = 0; x < byteStatus.Count; x++)
            {
                if (byteStatus[x] == 1)
                {
                    Point pt  = cornersThis[x];
                    Point pt2 = cornersPrev[x];

                    Imgproc.circle(grayMat, pt, 5, colorRed, iLineThickness - 1);

                    Imgproc.line(grayMat, pt, pt2, colorRed, iLineThickness);
                    double distance = System.Math.Sqrt(System.Math.Pow((pt2.x - pt.x), 2.0) + System.Math.Pow((pt2.y - pt.y), 2.0));
                    if (distance > 20)
                    {
                        num_distance++;
                    }

                    //Utilities.Debug("Distance[" + x + "] : " + distance);
                    //Debug.Log("Distance[" + x + "] : " + distance);
                }
            }
            Debug.Log("Num of Distance : " + num_distance);
            if (num_distance > 0)
            {
                Debug.Log("Movement Detected !!");
            }
        }

        //              Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        //this.GetComponent<CVCore>().Add(0, rgbaMat);
        Utils.matToTexture2D(grayMat, texture, colors);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }
Example #13
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif

                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }

                if (mMOP2fptsPrev.rows() == 0)
                {
                    // first time through the loop so we need prev and this mats
                    // plus prev points
                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // copy that to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get prev corners
                    Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsPrev.fromArray(MOPcorners.toArray());

                    // get safe copy of this corners
                    mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
                }
                else
                {
                    // we've been through before so
                    // this mat is valid. Copy it to prev mat
                    matOpFlowThis.copyTo(matOpFlowPrev);

                    // get this mat
                    Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                    // get the corners for this mat
                    Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                    mMOP2fptsThis.fromArray(MOPcorners.toArray());

                    // retrieve the corners from the prev mat
                    // (saves calculating them again)
                    mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

                    // and save this corners for next time through

                    mMOP2fptsThis.copyTo(mMOP2fptsSafe);
                }


                /*
                 * Parameters:
                 * prevImg first 8-bit input image
                 * nextImg second input image
                 * prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
                 * nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
                 * status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
                 * err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
                 */
                Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                if (!mMOBStatus.empty())
                {
                    List <Point> cornersPrev = mMOP2fptsPrev.toList();
                    List <Point> cornersThis = mMOP2fptsThis.toList();
                    List <byte>  byteStatus  = mMOBStatus.toList();

                    // draw every point whose flow was found
                    for (int x = 0; x < byteStatus.Count; x++)
                    {
                        if (byteStatus [x] == 1)
                        {
                            Point pt  = cornersThis [x];
                            Point pt2 = cornersPrev [x];

                            Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                            Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                        }
                    }
                }



                Utils.matToTexture2D(rgbaMat, texture, colors);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }
Example #14
		/// <summary>
		/// Recognizes the markers.
		/// </summary>
		/// <param name="grayscale">Grayscale.</param>
		/// <param name="detectedMarkers">Detected markers.</param>
		void recognizeMarkers (Mat grayscale, List<Marker> detectedMarkers)
		{
				List<Marker> goodMarkers = new List<Marker> ();
		
				// Identify the markers
				for (int i=0; i<detectedMarkers.Count; i++) {
						Marker marker = detectedMarkers [i];

			
						// Find the perspective transformation that brings current marker to rectangular form
						Mat markerTransform = Imgproc.getPerspectiveTransform (new MatOfPoint2f (marker.points.toArray ()), m_markerCorners2d);
				

						// Transform image to get a canonical marker image
						Imgproc.warpPerspective (grayscale, canonicalMarkerImage, markerTransform, markerSize);
			
						MatOfInt nRotations = new MatOfInt (0);
						int id = Marker.getMarkerId (canonicalMarkerImage, nRotations, m_markerDesign);
						if (id != -1) {
								marker.id = id;
//				                Debug.Log ("id " + id);

								//sort the points so that they are always in the same order no matter the camera orientation
								List<Point> MarkerPointsList = marker.points.toList ();

								//				std::rotate(marker.points.begin(), marker.points.begin() + 4 - nRotations, marker.points.end());
								MarkerPointsList = MarkerPointsList.Skip (4 - nRotations.toArray () [0]).Concat (MarkerPointsList.Take (4 - nRotations.toArray () [0])).ToList ();

								marker.points.fromList (MarkerPointsList);
				
								goodMarkers.Add (marker);
						}
						nRotations.Dispose ();
				}

//				Debug.Log ("goodMarkers " + goodMarkers.Count);
		
				// Refine marker corners using sub pixel accuracy
				if (goodMarkers.Count > 0) {
						List<Point> preciseCornersPoint = new List<Point> (4 * goodMarkers.Count);
						for (int i = 0; i < preciseCornersPoint.Capacity; i++) {
								preciseCornersPoint.Add (new Point (0, 0));
						}
						

			
						for (int i=0; i<goodMarkers.Count; i++) {
								Marker marker = goodMarkers [i];

								List<Point> markerPointsList = marker.points.toList ();
				
								for (int c = 0; c <4; c++) {
										preciseCornersPoint [i * 4 + c] = markerPointsList [c];
								}
						}

						MatOfPoint2f preciseCorners = new MatOfPoint2f (preciseCornersPoint.ToArray ());

						TermCriteria termCriteria = new TermCriteria (TermCriteria.MAX_ITER | TermCriteria.EPS, 30, 0.01);
						Imgproc.cornerSubPix (grayscale, preciseCorners, new Size (5, 5), new Size (-1, -1), termCriteria);

						preciseCornersPoint = preciseCorners.toList ();
			
						// Copy refined corners position back to markers
						for (int i=0; i<goodMarkers.Count; i++) {
								Marker marker = goodMarkers [i];

								List<Point> markerPointsList = marker.points.toList ();
				
								for (int c=0; c<4; c++) {
										markerPointsList [c] = preciseCornersPoint [i * 4 + c];
								}
						}
						preciseCorners.Dispose ();
				}

				detectedMarkers.Clear ();
				detectedMarkers.AddRange (goodMarkers);

		}
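The corner reordering above is the C# equivalent of C++'s std::rotate. Factored out, it is a one-liner; a sketch (RotateLeft is a hypothetical helper name):

using System.Collections.Generic;
using System.Linq;

// Rotates a list left by 'shift' elements, e.g. RotateLeft([a,b,c,d], 1) -> [b,c,d,a].
// Calling it with shift = 4 - nRotations reproduces the marker corner reordering above.
static List<T> RotateLeft<T>(List<T> list, int shift)
{
    shift = ((shift % list.Count) + list.Count) % list.Count; // normalize into [0, Count)
    return list.Skip(shift).Concat(list.Take(shift)).ToList();
}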
Example #15
    //Optical flow
    IEnumerator OpticalFlow()
    {
        Scalar tempHue;
        Scalar tempSpeed;

        int iCountTrackedPoints = 0;
        int vecCount            = 0;

        if (mMOP2fptsPrev.rows() == 0)
        {
            // first time through the loop so we need prev and this mats
            Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // copy that to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            //if (blurImage == true){
            //Gaussian filter of the MOG2 images
            //Imgproc.GaussianBlur(matOpFlowPrev, matOpFlowPrev, kernelSize, sigmaX, sigmaY);//Gauss filter
            //}
            // get prev corners
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, qLevel, minDistCorners);             //SLIDER input
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        }
        else
        {
            // we've been through before so
            // this mat is valid. Copy it to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            //if (blurImage == true){
            //Gaussian filter of the MOG2 images
            //Imgproc.GaussianBlur(matOpFlowThis, matOpFlowThis, kernelSize, sigmaX, sigmaY);//Gauss filter
            //}
            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, qLevel, minDistCorners);             // SLIDER input
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }

        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (mMOBStatus.rows() > 0)
        {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte>  byteStatus  = mMOBStatus.toList();

            double absX;
            double absY;             // used for the calculation of polar coordinates

            // Iterate over every tracked point (the original bound of
            // byteStatus.Count - 1 skipped the last one).
            for (int x = 0; x < byteStatus.Count; x++)
            {
                if (byteStatus [x] == 1)
                {
                    Point pt  = cornersThis [x];
                    Point pt2 = cornersPrev [x];

                    //if (pt != pt2) {// I think this IF statement should be removed, as pt and pt2 should always be different

                    float mySpeed = CalculateSpeedFloat(pt, pt2);

                    absX = pt.x - pt2.x;
                    absY = pt.y - pt2.y;
                    float angle = Mathf.Atan2((float)absX, (float)absY) * Mathf.Rad2Deg;
                    angle = Mathf.RoundToInt(angle);

                    //Get Hue based on Angle
                    tempHue   = GetHueColor((int)angle);
                    tempSpeed = GetSpeedColor((int)mySpeed);

                    //Store so we can add tracers
                    if (mySpeed > maxSpeed)                               //|| CalculateSpeedFloat (pt, pt2) <= 1
                    {
                        yield return(null);
                    }
                    else
                    {
                        tracerPoints.AddTracersToStorage(pt, pt2, tempHue, tempSpeed, videoPlayer.frame, angle, mySpeed);
                        speedVec = speedVec + mySpeed;
                        angleVec = angleVec + angle;
                        vecCount++;
                        //tracerPoints2.AddTracersToStorage (pt, pt2, tempSpeed, videoPlayer.frame, angle, mySpeed);

                        //ADD STORING SPEEDS TO VECTOR
                        //CSVDataEmail.AddDataTrack (speed,angle.ToString ());
                    }
                    iCountTrackedPoints++;
                }
            }
        }

        // Guard against division by zero when no points were tracked this frame
        if (vecCount > 0)
        {
            meanSpeed = (int)(speedVec / vecCount);
            meanAngle = (int)(angleVec / vecCount);
        }
        //sTrackingLogger = "Video frame: " + videoPlayer.frame.ToString() + "    Points: " + iCountTrackedPoints.ToString();
        sTrackingLogger = "Speed: " + meanSpeed.ToString() + " Angle: " + meanAngle.ToString();

        textTrackedPoints.text = sTrackingLogger;
        yield return(null);
    }
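CalculateSpeedFloat is referenced above but not shown. Given how its result is compared against maxSpeed and fed to GetSpeedColor, it is presumably the per-frame Euclidean displacement in pixels; a sketch under that assumption:

using UnityEngine;
using OpenCVForUnity.CoreModule;

// Assumed implementation: displacement magnitude between the current and
// previous tracked positions, in pixels per frame.
float CalculateSpeedFloat(Point pt, Point pt2)
{
    double dx = pt.x - pt2.x;
    double dy = pt.y - pt2.y;
    return Mathf.Sqrt((float)(dx * dx + dy * dy));
}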
Example #16
    public int getAnswerNumber(Mat align, Rect r)
    {
        Mat roi = new Mat(align, r);
        Mat roi_gray = new Mat(), roi_edges = new Mat();

        Imgproc.cvtColor(roi, roi_gray, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(roi_gray, roi_edges, 200, 200);
        // Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));
        // Imgproc.dilate(roi_edges, roi_edges, element);

        //Shape detection
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(roi_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        List <MatOfPoint> hulls = new List <MatOfPoint>();

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[]   arrIndex   = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints  = new Point[arrIndex.Length];

            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }

            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);

            //Filter outliers
            if (Imgproc.contourArea(temp) > 40 && Imgproc.contourArea(temp) < 200)
            {
                hulls.Add(temp);
            }
        }

        List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();

        for (int i = 0; i < hulls.Count; i++)
        {
            MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
            hull2f.Add(newPoint);
        }

        // NOTE: the loop body below returns unconditionally, so only the first
        // hull is ever evaluated per call.
        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List <Point> approx_polygon = approx.toList();
            approx_polygon = Scannerproc.filterPolygon(approx_polygon);
            double area = Imgproc.contourArea(approx);

            //Center of mass
            int cx = 0, cy = 0;
            for (int k = 0; k < approx_polygon.Count; k++)
            {
                cx += (int)approx_polygon[k].x;
                cy += (int)approx_polygon[k].y;
            }
            cx /= approx_polygon.Count;
            cy /= approx_polygon.Count;

            // Imgproc.circle(roi, new Point(cx, cy), 5, new Scalar(255), -1);


            // Texture2D tex = new Texture2D(roi.width(), roi.height(), TextureFormat.RGB24, false);
            // Utils.matToTexture2D(roi, tex);
            // byte[] bytes1 = tex.EncodeToJPG();
            // File.WriteAllBytes("D:/2019/OMR/" + "test.png", bytes1);

            Point pos1   = new Point((roi.width() * 1) / 10, cy);
            Point pos2   = new Point((roi.width() * 3) / 10, cy);
            Point pos3   = new Point((roi.width() * 5) / 10, cy);
            Point pos4   = new Point((roi.width() * 7) / 10, cy);
            Point pos5   = new Point((roi.width() * 9) / 10, cy);
            Point nowPos = new Point(cx, cy);

            double[] dist = new double[5];
            dist[0] = Scannerproc.distanceTwoPoints(pos1, nowPos);
            dist[1] = Scannerproc.distanceTwoPoints(pos2, nowPos);
            dist[2] = Scannerproc.distanceTwoPoints(pos3, nowPos);
            dist[3] = Scannerproc.distanceTwoPoints(pos4, nowPos);
            dist[4] = Scannerproc.distanceTwoPoints(pos5, nowPos);

            int    id       = -1;
            double min_dist = 999999;
            for (int t = 0; t < 5; t++)
            {
                if (dist[t] < min_dist)
                {
                    min_dist = dist[t];
                    id       = t;
                }
            }
            return(id + 1);

            //return plusPoints(tl, new Point(cx, cy));
        }

        return(0);
    }
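Scannerproc.distanceTwoPoints is not defined in this example; the nearest-column lookup above only needs plain Euclidean distance, so an assumed implementation would be:

using OpenCVForUnity.CoreModule;

// Assumed implementation of the distance helper used for the answer columns.
public static double distanceTwoPoints(Point a, Point b)
{
    double dx = a.x - b.x;
    double dy = a.y - b.y;
    return System.Math.Sqrt(dx * dx + dy * dy);
}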
Example #17
    public void getAnswerNumber(Mat align)
    {
        Mat align_gray = new Mat(), align_edges = new Mat();

        Imgproc.cvtColor(align, align_gray, Imgproc.COLOR_RGB2GRAY);
        Imgproc.Canny(align_gray, align_edges, 50, 50);
        Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(2 + 1, 2 + 1), new Point(1, 1));

        Imgproc.dilate(align_edges, align_edges, element);


        //Shape detection
        List <MatOfPoint> contours = new List <MatOfPoint>();
        Mat hierarchy = new Mat();

        Imgproc.findContours(align_edges, contours, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        List <MatOfPoint> hulls = new List <MatOfPoint>();

        //Texture2D tex = new Texture2D(align_edges.width(), align_edges.height(), TextureFormat.RGB24, false);
        //Utils.matToTexture2D(align_edges, tex);
        //byte[] bytes1 = tex.EncodeToJPG();
        //File.WriteAllBytes("D:/2019/OMR/" + "test.png", bytes1);

        for (int i = 0; i < contours.Count; i++)
        {
            MatOfInt hull_temp = new MatOfInt();
            Imgproc.convexHull(contours[i], hull_temp);
            int[]   arrIndex   = hull_temp.toArray();
            Point[] arrContour = contours[i].toArray();
            Point[] arrPoints  = new Point[arrIndex.Length];

            for (int k = 0; k < arrIndex.Length; k++)
            {
                arrPoints[k] = arrContour[arrIndex[k]];
            }

            MatOfPoint temp = new MatOfPoint();
            temp.fromArray(arrPoints);

            //Filter outliers
            if (Imgproc.contourArea(temp) > 90000 && Imgproc.contourArea(temp) < 110000)
            {
                hulls.Add(temp);
            }
        }

        List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();

        for (int i = 0; i < hulls.Count; i++)
        {
            MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
            hull2f.Add(newPoint);
        }

        List <Rect> rects = new List <Rect>();

        for (int i = 0; i < hulls.Count; i++)
        {
            //Approximate polygon
            MatOfPoint2f approx = new MatOfPoint2f();
            Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
            List <Point> approx_polygon = approx.toList();
            approx_polygon = Scannerproc.filterPolygon(approx_polygon);
            double area = Imgproc.contourArea(approx);

            if (Scannerproc.isSquare(approx_polygon))
            {
                Rect r         = Imgproc.boundingRect(new MatOfPoint(approx_polygon.ToArray()));
                bool isContain = false;
                for (int k = 0; k < rects.Count; k++)
                {
                    if (Scannerproc.distanceTwoPoints(rects[k].tl(), r.tl()) < 100)
                    {
                        //if (rects[k].contains(r) || r.contains(rects[k]))
                        isContain = true;
                    }
                }

                if (!isContain)
                {
                    rects.Add(r);
                    // Imgproc.rectangle(align, r.tl(), r.br(), new Scalar(255, 0, 0, 255), 3);

                    for (int j = 1; j < 21; j++)
                    {
                        Rect roi = new Rect((int)r.tl().x + (int)((r.width * 1.3) / 6), (int)r.tl().y + (r.height / 21) * j, (int)((r.width * 4.7) / 6), r.height / 21);
                        int  num = getAnswerNumber(align, roi);
                        if (num != 0)
                        {
                            Imgproc.putText(align, " " + num, new Point(roi.x - 40, roi.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
                            Imgproc.rectangle(align, roi.tl(), roi.br(), new Scalar(0, 255, 0, 255), 2);
                        }
                    }
                }
            }

            //Center of mass
            int cx = 0, cy = 0;
            for (int k = 0; k < approx_polygon.Count; k++)
            {
                cx += (int)approx_polygon[k].x;
                cy += (int)approx_polygon[k].y;
            }
            cx /= approx_polygon.Count;
            cy /= approx_polygon.Count;

            // Imgproc.circle(roi, new Point(cx, cy), 5, new Scalar(255), -1);
        }

        if (rects.Count == 4)
        {
            nowDetected = false;
        }
    }
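Scannerproc.isSquare is likewise not shown. A common heuristic, and one consistent with how it is used here, is to accept a 4-vertex polygon whose bounding box is roughly square and mostly filled by the contour; a sketch under that assumption (thresholds are illustrative):

using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;

// Assumed heuristic: 4 corners, near-square bounding box, and the contour
// fills most of the box (rules out thin or strongly concave quads).
public static bool isSquare(List<Point> polygon)
{
    if (polygon.Count != 4)
        return false;

    MatOfPoint contour = new MatOfPoint(polygon.ToArray());
    Rect box = Imgproc.boundingRect(contour);
    double aspect = (double)box.width / box.height;
    double fill = Imgproc.contourArea(contour) / (box.width * (double)box.height);

    return aspect > 0.8 && aspect < 1.25 && fill > 0.7;
}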
Example #18
    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            frame = webCamTextureToMatHelper.GetMat();
            frame.copyTo(img_orig);

            drawing = img_orig.clone();

            int       lowThreshold = 50;    // alternative values tried: (int)200, slider.value
            const int ratio        = 1;
            const int kernel_size  = 3;

            // NOTE: img_orig comes from the webcam helper as RGBA, while COLOR_BGR2Lab
            // expects a 3-channel input; convert to RGB/BGR first if img_lab is needed.
            Imgproc.cvtColor(img_orig, img_lab, Imgproc.COLOR_BGR2Lab);
            double omrSize = img_orig.cols() * img_orig.rows();

            Imgproc.cvtColor(img_orig, img_gray, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.GaussianBlur(img_gray, img_gray, new Size(15, 15), 1.5, 1.5);       // Gaussian blur
            Imgproc.erode(img_gray, img_gray, new Mat(), new Point(-1, -1), 1);         // Erosion
            // Imgproc.dilate(img_gray, img_gray, new Mat(), new Point(-1, -1), 10, 1, new Scalar(10));    // Dilation
            Imgproc.Canny(img_gray, img_edges, lowThreshold, lowThreshold * ratio, kernel_size, false);

            //Shape detection
            List <MatOfPoint> contours = new List <MatOfPoint>();
            Mat hierarchy = new Mat();
            Imgproc.findContours(img_edges, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

            //Texture2D tex = new Texture2D(img_edges.width(), img_edges.height(), TextureFormat.RGB24, false);
            //Utils.matToTexture2D(img_edges, tex);
            //byte[] bytes1 = tex.EncodeToJPG();
            //File.WriteAllBytes("D:/2019/OMR/" + "test213123.png", bytes1);

            List <MatOfPoint> hulls = new List <MatOfPoint>();

            for (int i = 0; i < contours.Count; i++)
            {
                MatOfInt hull_temp = new MatOfInt();
                Imgproc.convexHull(contours[i], hull_temp);
                int[]   arrIndex   = hull_temp.toArray();
                Point[] arrContour = contours[i].toArray();
                Point[] arrPoints  = new Point[arrIndex.Length];

                for (int k = 0; k < arrIndex.Length; k++)
                {
                    arrPoints[k] = arrContour[arrIndex[k]];
                }

                MatOfPoint temp = new MatOfPoint();
                temp.fromArray(arrPoints);

                //Filter outliers
                if (Imgproc.contourArea(temp) > omrSize / 3 && Imgproc.contourArea(temp) < (omrSize * 4) / 5)
                {
                    hulls.Add(temp);
                }
            }

            List <MatOfPoint2f> hull2f = new List <MatOfPoint2f>();
            for (int i = 0; i < hulls.Count; i++)
            {
                MatOfPoint2f newPoint = new MatOfPoint2f(hulls[i].toArray());
                hull2f.Add(newPoint);
            }

            for (int i = 0; i < hulls.Count; i++)
            {
                //Approximate polygon
                MatOfPoint2f approx = new MatOfPoint2f();

                Imgproc.approxPolyDP(hull2f[i], approx, 0.01 * Imgproc.arcLength(hull2f[i], true), true);
                List <Point> approx_polygon = approx.toList();
                // approx_polygon = Scannerproc.filterPolygon(approx_polygon);
                // Debug.Log(approx_polygon.Count);
                if (!Scannerproc.isSquare(approx_polygon))
                {
                    continue;
                }

                nowRectPoints.Clear();
                nowRectPoints.AddRange(approx_polygon);
                perspectiveAlign();

                //Center of mass
                int cx = 0, cy = 0;

                for (int k = 0; k < approx_polygon.Count; k++)
                {
                    cx += (int)approx_polygon[k].x;
                    cy += (int)approx_polygon[k].y;
                }
                cx /= approx_polygon.Count;
                cy /= approx_polygon.Count;

                Scannerproc.drawShape(drawing, approx_polygon, new Scalar(0, 255, 0));
            }

            if (showTextureOnScreen)
            {
                showCurrentTextureOnScreen();
            }
        }
    }
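The convex-hull index-to-point conversion appears verbatim in Examples #16, #17, and #18, so it is a good candidate for a shared helper. A sketch (hullToPoints is a hypothetical name):

using OpenCVForUnity.CoreModule;

// Converts the index output of Imgproc.convexHull back into a point contour
// usable with contourArea, approxPolyDP, etc.
static MatOfPoint hullToPoints(MatOfPoint contour, MatOfInt hullIndices)
{
    int[] arrIndex = hullIndices.toArray();
    Point[] arrContour = contour.toArray();
    Point[] arrPoints = new Point[arrIndex.Length];

    for (int k = 0; k < arrIndex.Length; k++)
    {
        arrPoints[k] = arrContour[arrIndex[k]];
    }

    MatOfPoint hull = new MatOfPoint();
    hull.fromArray(arrPoints);
    return hull;
}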