Beispiel #1
0
    /// <summary>
    /// Per-frame pipeline: copies the Vuforia camera image into an OpenCV Mat,
    /// grabs a frame from the front camera capture, runs Haar-cascade face
    /// detection on it, crops the (last) detected face, runs eye detection inside
    /// the crop, and displays the annotated mats.
    /// </summary>
    void Update()
    {
        Image camImg = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (camImg != null)
        {
            if (camImageMat == null)
            {
                // Lazy allocation on the first valid frame. Note: rows=height, cols=width.
                camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);
            }
            camImageMat.put(0, 0, camImg.Pixels);

            // Read the next frame from the front camera capture into a Mat.
            videoCapture.read(frontCameraImgMat);

            Face.getFacesHAAR(frontCameraImgMat, faces, faceXML);

            // Hoisted: height() was previously re-evaluated on every loop iteration.
            int faceCount = faces.height();
            Debug.Log("Faces " + faceCount);
            if (faceCount > 0)
            {
                for (var i = 0; i < faceCount; i++)
                {
                    double[] faceRect       = faces.get(i, 0);
                    Point    faceRectPoint1 = new Point(faceRect[0], faceRect[1]);
                    Point    faceRectPoint2 = new Point(faceRect[0] + faceRect[2], faceRect[1] + faceRect[3]);
                    Imgproc.rectangle(frontCameraImgMat, faceRectPoint1, faceRectPoint2, new Scalar(0, 0, 255), 5);
                    // NOTE(review): roi is overwritten each iteration, so the eye
                    // detection below only ever runs on the LAST detected face —
                    // confirm that is intended.
                    roi = new OpenCVForUnity.Rect(faceRectPoint1, faceRectPoint2);
                }

                // Crop the face region (shares data with frontCameraImgMat, so the
                // rectangle drawn above is part of the crop).
                faceWithMarkings = new Mat(frontCameraImgMat, roi);

                Face.getFacesHAAR(faceWithMarkings, eyes, eyeXML);

                int eyeCount = eyes.height();
                Debug.Log("Eyes " + eyeCount);
                // Draw at most two eye circles (a face has two eyes; extra hits are cascade noise).
                for (var i = 0; i < eyeCount && i < 2; i++)
                {
                    double[] eyeRect   = eyes.get(i, 0);
                    Point    eyeCenter = new Point(eyeRect[2] * 0.5F + eyeRect[0], eyeRect[3] * 0.5F + eyeRect[1]);
                    // Radius = half the diagonal of the detected eye rectangle.
                    int      radius    = (int)Mathf.Sqrt(Mathf.Pow(((float)eyeRect[2]) * 0.5F, 2F) + Mathf.Pow(((float)eyeRect[3]) * 0.5F, 2F));
                    Imgproc.circle(faceWithMarkings, new Point(eyeCenter.x, eyeCenter.y), radius, new Scalar(255, 0, 0), 5);
                }

                MatDisplay.MatToTexture(faceWithMarkings, ref unwarpedTexture);
                faceTargetPlane.GetComponent <Renderer> ().material.mainTexture = unwarpedTexture;
            }
        }

        // Guard: on early frames camImg may have been null and camImageMat never allocated.
        if (camImageMat != null)
        {
            MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);
        }
        MatDisplay.DisplayMat(frontCameraImgMat, MatDisplaySettings.BOTTOM_LEFT);
    }
Beispiel #2
0
    /// <summary>
    /// Per-frame: transfers the Vuforia camera image into an OpenCV Mat; while the
    /// finger-point image target is tracked, computes the plane homography, locates
    /// the fingertip, moves the finger plane to it, and — when the "draw" probe
    /// pixel is white — paints a circle onto the drawing canvas through the
    /// homography. Finally blends the drawing with the camera feed and displays it.
    /// </summary>
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImage = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImage == null)
        {
            return; // no frame available yet
        }
        if (_cameraImageMat == null)
        {
            // First frame -> generate Mat with same dimensions as camera feed
            _cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
        }
        _cameraImageMat.put(0, 0, cameraImage.Pixels); // transferring image data to Mat

        if (FingerPointTarget.GetComponent <ImageTargetBehaviour>().CurrentStatus != TrackableBehaviour.Status.TRACKED)
        {
            // Target lost: just show the raw camera feed and bail out.
            MatDisplay.DisplayMat(_cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
            return;
        }

        // Homography between the tracked object plane and the camera image.
        FindHomographyPoints(out var matDst, out var matObj);
        var H = Calib3d.findHomography(matObj, matDst);

        try
        {
            var bWMat                   = GetBWSkinColor();
            var fingerTipCoor           = FindFingerTip(bWMat);
            var fingerPointInWorldSpace = FingerPointInWorldSpace(fingerTipCoor);
            FingerPlane.position = fingerPointInWorldSpace;

            var colorPixelValue = FindPixelValue(_cameraImageMat, Color.position);
            var drawPixelValue  = FindPixelValue(bWMat, Draw.position);

            // A white (255) probe pixel in the skin mask means the finger covers the draw area.
            if ((int)drawPixelValue.First() == 255)
            {
                var camMask = PaintCircle(fingerTipCoor);
                DrawMaskOnCanvas(camMask, H, colorPixelValue);
            }
        }
        catch (System.Exception ex)
        {
            // Finger detection is best-effort per frame (it can fail when no finger
            // is visible), but don't swallow errors silently — previously `catch {}`
            // hid every exception, including real bugs.
            Debug.LogWarning("Finger tracking failed this frame: " + ex.Message);
        }

        var blendTex = BlendMats(H, _cameraImageMat, _drawingPlaceMat);

        MatDisplay.DisplayMat(blendTex, MatDisplaySettings.FULL_BACKGROUND);
    }
    //private Renderer rd;
    //public Texture2D tex;
    /// <summary>
    /// One-time setup: loads the dog texture and pre-allocates the four-point
    /// image-coordinate matrix used by the per-frame projection code.
    /// </summary>
    void Start()
    {
        texMat = MatDisplay.LoadRGBATexture("/models/dog_tex.png");

        // OpenCV matrices must be sized before values can be put into them.
        imagePoints = new MatOfPoint2f();
        imagePoints.alloc(4);
    }
    /// <summary>
    /// Vuforia trackables-updated callback: copies the camera frame into an OpenCV
    /// Mat, converts it to grayscale and displays it as the full background.
    /// </summary>
    private void OnTrackablesUpdated()
    {
        cam.fieldOfView = 41.5f;
        Image cameraImage = CameraDevice.Instance.GetCameraImage(mPixelFormat);

        if (cameraImage != null && mFormatRegistered)
        {
            if (cameraImageMat == null)
            {
                // Lazy allocation on the first frame (rows = height, cols = width).
                cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
                grayMat        = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC1);
            }
            cameraImageMat.put(0, 0, cameraImage.Pixels);
            // Fix: assuming mPixelFormat is RGBA8888 (matching the CV_8UC4 Mat, as in
            // the other examples in this file — TODO confirm), the correct conversion
            // is RGBA2GRAY; BGRA2GRAY swaps the R/B luma weights.
            Imgproc.cvtColor(cameraImageMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            MatDisplay.DisplayMat(grayMat, MatDisplaySettings.FULL_BACKGROUND);
        }
    }
Beispiel #5
0
    /// <summary>
    /// Per-frame: mirrors the Vuforia camera feed into an OpenCV Mat and
    /// shows it as the full-screen background.
    /// </summary>
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImage = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);
        if (cameraImage == null)
        {
            return; // no frame available yet
        }

        if (cameraImageMat == null)
        {
            // Allocate once, matching the feed dimensions (rows = height, cols = width).
            cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
        }

        cameraImageMat.put(0, 0, cameraImage.Pixels);
        MatDisplay.DisplayMat(cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
    }
Beispiel #6
0
    /// <summary>
    /// Per-frame: copies the RGBA camera feed into an OpenCV Mat, converts it
    /// to grayscale and displays the grayscale image full-screen.
    /// </summary>
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImageRaw = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImageRaw != null)
        {
            if (cameraImageMat == null)
            {
                // Rows first, then columns.
                cameraImageMat = new Mat(cameraImageRaw.Height, cameraImageRaw.Width, CvType.CV_8UC4);
                // Fix: grayscale output is single-channel. The CV_8UC4 allocation was
                // wasted work — cvtColor reallocates the dst to CV_8UC1 anyway.
                grayScale      = new Mat(cameraImageRaw.Height, cameraImageRaw.Width, CvType.CV_8UC1);
            }

            byte[] pixels = cameraImageRaw.Pixels;
            cameraImageMat.put(0, 0, pixels);
            // Fix: the source Mat holds 4-channel RGBA8888 data, so use RGBA2GRAY;
            // RGB2GRAY declares a 3-channel source for this 4-channel input.
            Imgproc.cvtColor(cameraImageMat, grayScale, Imgproc.COLOR_RGBA2GRAY);
            MatDisplay.DisplayMat(grayScale, MatDisplaySettings.FULL_BACKGROUND);
        }
    }
Beispiel #7
0
    /// <summary>
    /// Per-frame: copies the RGBA camera feed into an OpenCV Mat, converts it to
    /// grayscale, applies a Gaussian adaptive threshold (contour-like binary image)
    /// and displays the result full-screen.
    /// </summary>
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImageRaw = CameraDevice.Instance.GetCameraImage(
            Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImageRaw != null)
        {
            if (cameraImageMat == null)
            {
                // Rows first, then columns.
                cameraImageMat = new Mat(cameraImageRaw.Height, cameraImageRaw.Width, CvType.CV_8UC4);
                // Fix: the grayscale/threshold image is single-channel; CV_8UC4 was
                // wasted work — cvtColor reallocates the dst to CV_8UC1 anyway.
                countourMat    = new Mat(cameraImageRaw.Height, cameraImageRaw.Width, CvType.CV_8UC1);
            }

            byte[] pixels = cameraImageRaw.Pixels;
            cameraImageMat.put(0, 0, pixels);
            // Fix: the source Mat holds 4-channel RGBA8888 data, so use RGBA2GRAY;
            // RGB2GRAY declares a 3-channel source for this 4-channel input.
            Imgproc.cvtColor(cameraImageMat, countourMat, Imgproc.COLOR_RGBA2GRAY);

            // In-place adaptive threshold: 7x7 Gaussian neighbourhood, constant C = 7.
            Imgproc.adaptiveThreshold(countourMat, countourMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 7, 7);
            MatDisplay.DisplayMat(countourMat, MatDisplaySettings.FULL_BACKGROUND);
        }
    }
Beispiel #8
0
    /// <summary>
    /// Per-frame: projects the four tracked corner objects into camera-image
    /// coordinates using a pinhole model (intrinsics A times extrinsics [R|t]),
    /// draws the resulting quadrilateral for debugging, unwarps the quad to an
    /// axis-aligned rectangle via a homography, and swaps the fish texture to the
    /// unwarped image while the space key is held.
    /// </summary>
    void Update()
    {
        // Access camera image provided by Vuforia
        Image camImg = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (camImg != null)
        {
            if (camImageMat == null)
            {
                // First time -> instantiate camera image specific data
                camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);  // Note: rows=height, cols=width
            }

            camImageMat.put(0, 0, camImg.Pixels);

            // Match the virtual camera's FoV to the real camera. This approach only uses fy.
            cam.fieldOfView = 2 * Mathf.Atan(camImg.Height * 0.5f / fy) * Mathf.Rad2Deg;

            Vector3 worldPnt1 = corner1.transform.position;
            Vector3 worldPnt2 = corner2.transform.position;
            Vector3 worldPnt3 = corner3.transform.position;
            Vector3 worldPnt4 = corner4.transform.position;

            // Extrinsics: world -> camera coordinate system.
            Matrix4x4 Rt = cam.transform.worldToLocalMatrix;
            // Intrinsics: focal lengths on the diagonal, principal point in the last column.
            Matrix4x4 A  = Matrix4x4.identity;
            A.m00 = fx;
            A.m11 = fy;
            A.m02 = cx;
            A.m12 = cy;

            Matrix4x4 worldToImage = A * Rt;

            Vector3 hUV1 = worldToImage.MultiplyPoint3x4(worldPnt1);
            Vector3 hUV2 = worldToImage.MultiplyPoint3x4(worldPnt2);
            Vector3 hUV3 = worldToImage.MultiplyPoint3x4(worldPnt3);
            Vector3 hUV4 = worldToImage.MultiplyPoint3x4(worldPnt4);

            // hUV are 2D homogeneous image coordinates; normalize by dividing by Z.
            Vector2 uv1 = new Vector2(hUV1.x, hUV1.y) / hUV1.z;
            Vector2 uv2 = new Vector2(hUV2.x, hUV2.y) / hUV2.z;
            Vector2 uv3 = new Vector2(hUV3.x, hUV3.y) / hUV3.z;
            Vector2 uv4 = new Vector2(hUV4.x, hUV4.y) / hUV4.z;

            // Flip v to convert Unity's bottom-left origin to OpenCV's top-left origin.
            // (imagePoints must have been alloc'd before putting values into it.)
            imagePoints.put(0, 0, uv1.x, camImg.Height - uv1.y);
            imagePoints.put(1, 0, uv2.x, camImg.Height - uv2.y);
            imagePoints.put(2, 0, uv3.x, camImg.Height - uv3.y);
            imagePoints.put(3, 0, uv4.x, camImg.Height - uv4.y);

            // Debug draw points
            Point imgPnt1 = new Point(imagePoints.get(0, 0));
            Point imgPnt2 = new Point(imagePoints.get(1, 0));
            Point imgPnt3 = new Point(imagePoints.get(2, 0));
            Point imgPnt4 = new Point(imagePoints.get(3, 0));
            Imgproc.circle(camImageMat, imgPnt1, 5, new Scalar(255, 0, 0, 255));
            Imgproc.circle(camImageMat, imgPnt2, 5, new Scalar(0, 255, 0, 255));
            Imgproc.circle(camImageMat, imgPnt3, 5, new Scalar(0, 0, 255, 255));
            Imgproc.circle(camImageMat, imgPnt4, 5, new Scalar(255, 255, 0, 255));
            Scalar lineCl = new Scalar(200, 120, 0, 160);
            Imgproc.line(camImageMat, imgPnt1, imgPnt2, lineCl);
            Imgproc.line(camImageMat, imgPnt2, imgPnt3, lineCl);
            Imgproc.line(camImageMat, imgPnt3, imgPnt4, lineCl);
            Imgproc.line(camImageMat, imgPnt4, imgPnt1, lineCl);

            // Destination rectangle for the unwarp (corner order matches imagePoints).
            var destPoints = new MatOfPoint2f();
            destPoints.alloc(4);
            destPoints.put(0, 0, width, 0);
            destPoints.put(1, 0, width, height);
            destPoints.put(2, 0, 0, height);
            destPoints.put(3, 0, 0, 0);

            var homography = Calib3d.findHomography(imagePoints, destPoints);

            // Fix: warp into a dedicated output Mat. The original reused destPoints
            // (the 4x1 point matrix) as the destination IMAGE, clobbering the point
            // data and displaying a Mat that was never meant to hold pixels.
            var unwarped = new Mat();
            Imgproc.warpPerspective(camImageMat, unwarped, homography, new Size(camImageMat.width(), camImageMat.height()));

            unwarpedTexture = unwarpedTextureClean;

            MatDisplay.MatToTexture(unwarped, ref unwarpedTexture);             // Take output and transform into texture

            // Hold space to preview the unwarped feed on the fish; otherwise show the base texture.
            if (Input.GetKey("space"))
            {
                fish.GetComponent <Renderer>().material.mainTexture = unwarpedTexture;
            }
            else
            {
                fish.GetComponent <Renderer>().material.mainTexture = tex;
            }

            MatDisplay.DisplayMat(unwarped, MatDisplaySettings.BOTTOM_LEFT);
            MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);
        }
    }
Beispiel #9
0
    /// <summary>
    /// Per-frame: projects the four corner objects into camera-image coordinates
    /// with a pinhole model, draws debug circles at the projected points, computes
    /// the homography from the projected quad to a 442x442 square, warps the dog
    /// texture through its inverse, and displays the camera Mat.
    /// </summary>
    void Update()
    {
        // Camera image from Vuforia
        Image camImg = CameraDevice.Instance.GetCameraImage(PIXEL_FORMAT.RGBA8888);

        if (camImg != null && camImg.Height > 0)
        {
            if (camImageMat == null)
            {
                // Vuforia seems to enforce a resolution of width=640px for any camera
                Debug.Log("rows: " + camImg.Height + ", cols: " + camImg.Width);
                camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4);
            }

            // Put Vuforia camera feed pixels into OpenCV display matrix
            camImageMat.put(0, 0, camImg.Pixels);

            //---- <THIS IS WHERE THE CORNER PROJECTION BEGINS> ----

            // Get corner's position in world coordinates.
            // NOTE(review): transform.position is already in world space, so applying
            // localToWorldMatrix to it transforms twice. Kept as-is to preserve the
            // example's output — confirm and simplify to transform.position alone.
            Matrix4x4 m1        = corner1.transform.localToWorldMatrix;
            Matrix4x4 m2        = corner2.transform.localToWorldMatrix;
            Matrix4x4 m3        = corner3.transform.localToWorldMatrix;
            Matrix4x4 m4        = corner4.transform.localToWorldMatrix;
            Vector3   worldPnt1 = m1.MultiplyPoint3x4(corner1.transform.position);
            Vector3   worldPnt2 = m2.MultiplyPoint3x4(corner2.transform.position);
            Vector3   worldPnt3 = m3.MultiplyPoint3x4(corner3.transform.position);
            Vector3   worldPnt4 = m4.MultiplyPoint3x4(corner4.transform.position);

            // Matrix that goes from world to the camera coordinate system
            Matrix4x4 Rt = cam.transform.worldToLocalMatrix;

            // Camera intrinsics: focal lengths on the diagonal, principal point in the last column.
            Matrix4x4 A = Matrix4x4.identity;
            A.m00 = fx;
            A.m11 = fy;
            A.m02 = cx;
            A.m12 = cy;

            Matrix4x4 worldToImage = A * Rt;

            Vector3 hUV1 = worldToImage.MultiplyPoint3x4(worldPnt1);
            Vector3 hUV2 = worldToImage.MultiplyPoint3x4(worldPnt2);
            Vector3 hUV3 = worldToImage.MultiplyPoint3x4(worldPnt3);
            Vector3 hUV4 = worldToImage.MultiplyPoint3x4(worldPnt4);

            // Homogeneous image coordinates: normalize by dividing by Z.
            Vector2 uv1 = new Vector2(hUV1.x, hUV1.y) / hUV1.z;
            Vector2 uv2 = new Vector2(hUV2.x, hUV2.y) / hUV2.z;
            Vector2 uv3 = new Vector2(hUV3.x, hUV3.y) / hUV3.z;
            Vector2 uv4 = new Vector2(hUV4.x, hUV4.y) / hUV4.z;

            // Flip the v-coordinate: OpenCV's origin is top-left, Unity's is bottom-left.
            float maxV = camImg.Height - 1; // -1 because pixel coordinates are 0-indexed
            imagePoints.put(0, 0, uv1.x, maxV - uv1.y);
            imagePoints.put(1, 0, uv2.x, maxV - uv2.y);
            imagePoints.put(2, 0, uv3.x, maxV - uv3.y);
            imagePoints.put(3, 0, uv4.x, maxV - uv4.y);

            Point imgPnt1 = new Point(imagePoints.get(0, 0));
            Point imgPnt2 = new Point(imagePoints.get(1, 0));
            Point imgPnt3 = new Point(imagePoints.get(2, 0));
            Point imgPnt4 = new Point(imagePoints.get(3, 0));

            // Debug: distinct radii/colors show whether each imgPnt landed correctly.
            Imgproc.circle(camImageMat, imgPnt1, 10, new Scalar(255, 0, 0, 200), 5);
            Imgproc.circle(camImageMat, imgPnt2, 20, new Scalar(255, 255, 0, 255), 5);
            Imgproc.circle(camImageMat, imgPnt3, 30, new Scalar(0, 255, 0, 255), 5);
            Imgproc.circle(camImageMat, imgPnt4, 40, new Scalar(0, 0, 255, 255), 4);

            // Destination square (442x442) matching the texture resolution.
            MatOfPoint2f unwarpPoints;
            unwarpPoints = new MatOfPoint2f();
            unwarpPoints.alloc(4);
            unwarpPoints.put(0, 0, 0, 0);
            unwarpPoints.put(1, 0, 0, 442);
            unwarpPoints.put(2, 0, 442, 442);
            unwarpPoints.put(3, 0, 442, 0);

            // Homography square->quad is the inverse of quad->square.
            Mat H    = Calib3d.findHomography(imagePoints, unwarpPoints);
            Mat Hinv = H.inv();
            Mat dst  = new Mat(442, 442, CvType.CV_8UC4);
            // Fix: the texture was reloaded from disk EVERY frame — load it once
            // and reuse (Start() may already have loaded it).
            if (texMat == null)
            {
                texMat = MatDisplay.LoadRGBATexture("/models/dog_tex.png");
            }
            Imgproc.warpPerspective(texMat, dst, Hinv, new Size(442, 442));

            //---- </THIS IS WHERE THE CORNER PROJECTION ENDS> ----

            // Display the Mat that includes video feed and debug points.
            // Do not forget to disable Vuforia's video background and change your aspect ratio to 4:3!
            MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);

            //---- MATCH INTRINSICS OF REAL CAMERA AND PROJECTION MATRIX OF VIRTUAL CAMERA ----
            // See lecture slides for why this formula works (uses fy only).
            cam.fieldOfView = 2 * Mathf.Atan(camImg.Height * 0.5f / fy) * Mathf.Rad2Deg;
        }
    }
Beispiel #10
0
    /// <summary>
    /// Per-frame virtual keyboard: transfers the camera image into a Mat; while the
    /// alphabet target is tracked, segments the finger color, samples the mask at
    /// the "send" probe position, and when the finger is present (white pixel) and
    /// typing is not debounced, appends the nearest keyboard key's letter to the
    /// text (with BackSpace/Space handled specially).
    /// </summary>
    void Update()
    {
        MatDisplay.SetCameraFoV(41.5f);

        Image cameraImage = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);

        if (cameraImage == null)
        {
            return; // no frame available yet
        }
        if (_cameraImageMat == null)
        {
            // First frame -> generate Mat with same dimensions as camera feed
            _cameraImageMat = new Mat(cameraImage.Height, cameraImage.Width, CvType.CV_8UC4);
        }
        _cameraImageMat.put(0, 0, cameraImage.Pixels); // transferring image data to Mat

        if (AlphabetTarget.GetComponent <ImageTargetBehaviour>().CurrentStatus != TrackableBehaviour.Status.TRACKED)
        {
            // Target lost: just show the raw camera feed and bail out.
            MatDisplay.DisplayMat(_cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
            return;
        }
        var fingerColorMat = FindFingerColor();
        var test           = Send.position;

        // Flip y to sample in image coordinates (OpenCV origin is top-left).
        test.y = -test.y;
        var screenPosSend = Camera.WorldToScreenPoint(test);
        var value         = fingerColorMat.get((int)screenPosSend.y, (int)screenPosSend.x);

        // Check if value at finger is white (which means that finger is present)
        try
        {
            var fingerPointInWorldSpace = FingerPointInWorldSpace(fingerColorMat);
            FingerPlane.position = fingerPointInWorldSpace;

            // _keyPressed debounces typing; DelayTyping() resets it after a delay.
            if ((int)value[0] > 250 && !_keyPressed)
            {
                StartCoroutine(DelayTyping());
                var oldDistance = float.MaxValue;
                var letter      = string.Empty;

                // Reject presses farther than the span between two reference keys.
                var maxDistance = Vector3.Distance(Camera.WorldToScreenPoint(KeyboardPos[0].position),
                                                   Camera.WorldToScreenPoint(KeyboardPos[7].position));

                // Nearest-key search in screen space.
                KeyboardPos.ForEach(x =>
                {
                    var worldToScreenPoint = Camera.WorldToScreenPoint(x.position);
                    var distance           = Vector3.Distance(Camera.WorldToScreenPoint(fingerPointInWorldSpace),
                                                              worldToScreenPoint);
                    if (distance > oldDistance || distance > maxDistance)
                    {
                        return;
                    }
                    letter      = x.name;
                    oldDistance = distance;
                });
                Debug.Log(letter);
                switch (letter)
                {
                case "BackSpace":
                    // Guard: Remove on an empty string would throw ArgumentOutOfRangeException.
                    if (Text.text.Length > 0)
                    {
                        Text.text = Text.text.Remove(Text.text.Length - 1);
                    }
                    break;

                case "Space":
                    Text.text += " ";
                    break;

                default:
                    Text.text += letter;
                    break;
                }
            }
        }
        catch (System.Exception ex)
        {
            // Finger detection is best-effort per frame, but don't swallow errors
            // silently — previously `catch {}` hid every exception.
            Debug.LogWarning("Keyboard finger tracking failed this frame: " + ex.Message);
        }

        MatDisplay.DisplayMat(_cameraImageMat, MatDisplaySettings.FULL_BACKGROUND);
    }