Example #1
    // Use this for initialization
    void Start()
    {
        Texture2D boxTexture        = Resources.Load<Texture2D>("box");
        Texture2D boxInSceneTexture = Resources.Load<Texture2D>("box_in_scene");

        Mat box3Channels = new Mat();

        TextureConvert.Texture2dToOutputArray(boxTexture, box3Channels);
        Mat box = new Mat();

        CvInvoke.CvtColor(box3Channels, box, ColorConversion.Bgra2Gray);
        CvInvoke.Flip(box, box, FlipType.Vertical);

        Mat boxInScene3Channels = new Mat();

        TextureConvert.Texture2dToOutputArray(boxInSceneTexture, boxInScene3Channels);
        Mat boxInScene = new Mat();

        CvInvoke.CvtColor(boxInScene3Channels, boxInScene, ColorConversion.Bgra2Gray);
        CvInvoke.Flip(boxInScene, boxInScene, FlipType.Vertical);

        long time;
        Mat  img = FeatureMatchingExample.DrawMatches.Draw(box, boxInScene, out time);
        //CvInvoke.Imwrite("c:\\tmp\\tmp.png", img);
        //Mat outImg = new Mat();
        //CvInvoke.CvtColor(img, outImg, ColorConversion.Bgr2Bgra);
        //CvInvoke.Imwrite("c:\\tmp\\tmp.png", outImg);
        Texture2D texture = TextureConvert.InputArrayToTexture2D(img);

        this.GetComponent<GUITexture>().texture = texture;

        this.GetComponent<GUITexture>().pixelInset = new Rect(-texture.width / 2, -texture.height / 2, texture.width, texture.height);
    }
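The FeatureMatchingExample.DrawMatches.Draw helper is not shown here. Below is a minimal sketch of what such a helper might look like, assuming Emgu CV 3.x's Features2D API (the detector class is named ORB rather than ORBDetector in some releases): ORB key points, a brute-force Hamming matcher, and a uniqueness vote before drawing. Treat it as an illustration of the technique, not the code the example actually calls.

using System.Diagnostics;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.Util;

public static class DrawMatchesSketch
{
    public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
    {
        Mat result = new Mat();
        using (VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint())
        using (VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint())
        using (Mat modelDescriptors = new Mat())
        using (Mat observedDescriptors = new Mat())
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        using (ORBDetector orb = new ORBDetector())
        using (BFMatcher matcher = new BFMatcher(DistanceType.Hamming))
        {
            Stopwatch watch = Stopwatch.StartNew();

            // Detect key points and compute binary descriptors on both images.
            orb.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
            orb.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

            // Brute-force match against the model; k = 2 enables a uniqueness test.
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, 2, null);

            // Mask out matches that are not sufficiently unique.
            using (Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1))
            {
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                watch.Stop();
                matchTime = watch.ElapsedMilliseconds;

                // Render the two images side by side with match lines between them.
                Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                              matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
            }
        }
        return result;
    }
}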
Example #2
    // Use this for initialization
    void Start()
    {
        String[] textureNames = new string[] { "stitch1", "stitch2", "stitch3", "stitch4" };
        Mat[]    imgs         = new Mat[textureNames.Length];
        Mat      tmp          = new Mat();

        for (int i = 0; i < textureNames.Length; i++)
        {
            Texture2D tex = Resources.Load<Texture2D>(textureNames[i]);
            imgs[i] = new Mat();
            TextureConvert.Texture2dToOutputArray(tex, tmp);
            CvInvoke.Flip(tmp, tmp, FlipType.Vertical);
            CvInvoke.CvtColor(tmp, imgs[i], ColorConversion.Bgra2Bgr);
            if (imgs[i].IsEmpty)
            {
                Debug.Log("Image " + i + " is empty");
            }
            else
            {
                Debug.Log("Image " + i + " is " + imgs[i].NumberOfChannels + " channels " + imgs[i].Width + "x" + imgs[i].Height);
            }
        }
        Emgu.CV.Stitching.Stitcher stitcher = new Emgu.CV.Stitching.Stitcher();
        Mat result = new Mat();

        using (VectorOfMat vms = new VectorOfMat(imgs))
            stitcher.Stitch(vms, result);
        //CvInvoke.Flip(result, result, FlipType.Vertical);

        Texture2D texture = TextureConvert.InputArrayToTexture2D(result, FlipType.Vertical);

        RenderTexture(texture);
        ResizeTexture(texture);
    }
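The RenderTexture and ResizeTexture helpers are referenced but not defined in this snippet. Given how the other examples display their results on a legacy GUITexture, a plausible minimal implementation would be the following (the method bodies are an assumption):

    private void RenderTexture(Texture2D texture)
    {
        // Show the processed image on the GUITexture attached to this object.
        GetComponent<GUITexture>().texture = texture;
    }

    private void ResizeTexture(Texture2D texture)
    {
        // Centre the texture on screen at its native resolution.
        GetComponent<GUITexture>().pixelInset = new Rect(-texture.width / 2, -texture.height / 2, texture.width, texture.height);
    }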
Example #3
    // Use this for initialization
    void Start()
    {
        Texture2D boxTexture        = Resources.Load<Texture2D>("box");
        Texture2D boxInSceneTexture = Resources.Load<Texture2D>("box_in_scene");

        Mat box3Channels = new Mat();

        TextureConvert.Texture2dToOutputArray(boxTexture, box3Channels);
        Mat box = new Mat();

        CvInvoke.CvtColor(box3Channels, box, ColorConversion.Bgra2Gray);
        CvInvoke.Flip(box, box, FlipType.Vertical);

        Mat boxInScene3Channels = new Mat();

        TextureConvert.Texture2dToOutputArray(boxInSceneTexture, boxInScene3Channels);
        Mat boxInScene = new Mat();

        CvInvoke.CvtColor(boxInScene3Channels, boxInScene, ColorConversion.Bgra2Gray);
        CvInvoke.Flip(boxInScene, boxInScene, FlipType.Vertical);

        long time;
        Mat  img = FeatureMatchingExample.DrawMatches.Draw(box, boxInScene, out time);
        //CvInvoke.Imwrite("c:\\tmp\\tmp.png", img);
        //Mat outImg = new Mat();
        //CvInvoke.CvtColor(img, outImg, ColorConversion.Bgr2Bgra);
        //CvInvoke.Imwrite("c:\\tmp\\tmp.png", outImg);
        Texture2D texture = TextureConvert.InputArrayToTexture2D(img);

        RenderTexture(texture);
        ResizeTexture(texture);
    }
Example #4
    // Update is called once per frame
    void Update()
    {
        // Update texture with webcam image.
        copyTexture = new Texture2D(webcamTexture.width, webcamTexture.height);
        copyTexture.SetPixels32(webcamTexture.GetPixels32());
        copyTexture.Apply();
        // Convert to an OpenCV image to be used in image manipulation.
        TextureConvert.Texture2dToOutputArray(copyTexture, img);
        // This will appear upside down, so flip it.
        CvInvoke.Flip(img, img, FlipType.Vertical);

        // The using statement ensures the temporary UMat is disposed promptly.
        using (UMat gray = new UMat())
        {
            // Convert image to gray scale.
            CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);
            // Equalise the lighting.
            CvInvoke.EqualizeHist(gray, gray);
            //CvInvoke.CvtColor(img, img, ColorConversion.Bgr2Gray);

            // Detect faces; each rectangle highlights where a face is in the image.
            Rectangle[] faces = faceCascade.DetectMultiScale(gray);
            foreach (Rectangle face in faces)
            {
                using (UMat faceGray = new UMat(img, face))
                {
                    // Draw a green rectangle around the found area.
                    CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
                    // Convert ROI to gray-scale.
                    CvInvoke.CvtColor(faceGray, faceGray, ColorConversion.Bgr2Gray);
                    // Convert image to canny to detect edges.
                    CvInvoke.Canny(faceGray, faceGray, 30, 128, 3, false);
                    // Hierarchy order of contours.
                    //hierarchy = CvInvoke.FindContourTree(faceGray, contours, ChainApproxMethod.ChainApproxSimple);
                    // Find the contours in the ROI area.
                    CvInvoke.FindContours(faceGray, contours, null, RetrType.List, ChainApproxMethod.ChainApproxSimple, face.Location);
                    for (int i = 0; i < contours.Size; ++i)
                    {
                        CvInvoke.DrawContours(img, contours, i, new MCvScalar(255, 255, 255));
                    }
                }
            }
        }

        // Update the result texture.
        //Texture2D texture
        resultTexture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);
        GetComponent<GUITexture>().texture = resultTexture;
        Size s = img.Size;

        GetComponent<GUITexture>().pixelInset = new Rect(-s.Width / 2, -s.Height / 2, s.Width, s.Height);
    }
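This Update() relies on fields initialised elsewhere in the script. A minimal set of declarations and a Start() that would support it might look like this (field names match the snippet; the initialisation details are an assumption):

    // Required namespaces (an assumption): Emgu.CV, Emgu.CV.CvEnum, Emgu.CV.Structure,
    // Emgu.CV.Util, System.IO, System.Drawing.
    private WebCamTexture webcamTexture;
    private Texture2D copyTexture;
    private Texture2D resultTexture;
    private UMat img = new UMat();
    private CascadeClassifier faceCascade;
    private VectorOfVectorOfPoint contours = new VectorOfVectorOfPoint();

    void Start()
    {
        // Start streaming from the default webcam.
        webcamTexture = new WebCamTexture();
        webcamTexture.Play();

        // Load the face cascade; the XML must already exist at this path
        // (see the copy-from-Resources step in the examples below).
        faceCascade = new CascadeClassifier(Path.Combine(Application.persistentDataPath, "haarcascade_frontalface_alt2.xml"));
    }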
Example #5
    // Use this for initialization
    void Start()
    {
        Texture2D lenaTexture = Resources.Load<Texture2D>("lena");

        UMat img = new UMat();

        TextureConvert.Texture2dToOutputArray(lenaTexture, img);
        CvInvoke.Flip(img, img, FlipType.Vertical);

        //String fileName = "haarcascade_frontalface_default";
        //String fileName = "lbpcascade_frontalface";
        String fileName = "haarcascade_frontalface_alt2";
        String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
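        // A CascadeClassifier loads from a real file path, and assets under Resources
        // are not plain files at runtime, so the XML is written out to disk first.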
        //if (!File.Exists(filePath))
        {
            TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);

#if UNITY_METRO
            UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
            File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
        }

        using (CascadeClassifier classifier = new CascadeClassifier(filePath))
            using (UMat gray = new UMat())
            {
                CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);

                Rectangle[] faces = null;
                try
                {
                    faces = classifier.DetectMultiScale(gray);

                    foreach (Rectangle face in faces)
                    {
                        CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
                    }
                }
                catch (Exception e)
                {
                    Debug.Log(e.Message);

                    return;
                }
            }

        Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);

        this.GetComponent<GUITexture>().texture = texture;
        Size s = img.Size;
        this.GetComponent<GUITexture>().pixelInset = new Rect(-s.Width / 2, -s.Height / 2, s.Width, s.Height);
    }
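DetectMultiScale is called with its defaults above. In practice the commonly tuned parameters are the pyramid scale step, the neighbour threshold, and the minimum detection size; a hedged example follows, with illustrative values that are not taken from the example above:

        Rectangle[] faces = classifier.DetectMultiScale(
            gray,
            1.1,                // scaleFactor: pyramid step between scans
            4,                  // minNeighbors: higher means fewer false positives
            new Size(30, 30));  // minSize: ignore detections smaller than this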
Example #6
    void Track()
    {
        // Remember the landmark set from the previous pass.
        if (lastPositions != null)
        {
            lastPositions = landmarks;
        }

        // We fetch webcam texture data
        convertedTexture.SetPixels(webcamTexture.GetPixels());
        convertedTexture.Apply();

        // We convert the webcam texture2D into the OpenCV image format
        UMat img = new UMat();

        TextureConvert.Texture2dToOutputArray(convertedTexture, img);
        CvInvoke.Flip(img, img, FlipType.Vertical);

        using (CascadeClassifier classifier = new CascadeClassifier(filePath)) {
            using (UMat gray = new UMat()) {
                // We convert the image to the monochrome format the detector expects and detect the faces
                CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);
                facesVV   = new VectorOfRect(classifier.DetectMultiScale(gray));
                landmarks = new VectorOfVectorOfPointF();

                // We fit facial landmarks onto the detected faces
                if (facemark.Fit(gray, facesVV, landmarks))
                {
                    FaceInvoke.DrawFacemarks(img, landmarks[0], new MCvScalar(255, 255, 0, 255));

                    // We calculate the nose position to use as a capture center
                    noseOffset = new Vector3(landmarks[0][67].X, landmarks[0][67].Y * -1f, 0f);

                    // We draw markers and compute positions
                    for (int j = 0; j < 68; j++)
                    {
                        Vector3 markerPos = new Vector3(landmarks[0][j].X, landmarks[0][j].Y * -1f, 0f);

                        if (displayOffsetMarkers)
                        {
                            Debug.DrawLine(markerPos, markerPos + (Vector3.forward * 3f), UnityEngine.Color.green, trackingInterval);
                        }

                        AdjustCalibration(j, markerPos);
                    }
                    recording = true;
                }
                else
                {
                    recording = false;
                }

                if (displayCalibrationMarkers)
                {
                    DisplayCalibration();
                }
            }
        }

        // We render out the calculation result into the debug image
        if (debugImage)
        {
            Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);
            debugImage.sprite = Sprite.Create(texture, new Rect(0, 0, texture.width, texture.height), new Vector2(0.5f, 0.5f));
        }
    }
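Track() assumes that facemark, filePath, and the texture fields were initialised elsewhere. A minimal facemark setup, assuming Emgu CV's FacemarkLBF with the stock lbfmodel.yaml landmark model (the path handling here is an assumption):

    // Required namespaces (an assumption): Emgu.CV.Face, System.IO.
    private FacemarkLBF facemark;

    void InitFacemark()
    {
        // Create the LBF landmark detector and load its trained model; like the
        // cascade XML in the other examples, the model must be a real file on disk.
        FacemarkLBFParams facemarkParams = new FacemarkLBFParams();
        facemark = new FacemarkLBF(facemarkParams);
        facemark.LoadModel(Path.Combine(Application.persistentDataPath, "lbfmodel.yaml"));
    }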
Example #7
    // Use this for initialization
    void Start()
    {
        Texture2D lenaTexture = Resources.Load<Texture2D>("lena");

        //updateTextureWithString("load lena ok");
        Mat img = new Mat();

        TextureConvert.Texture2dToOutputArray(lenaTexture, img);
        CvInvoke.Flip(img, img, FlipType.Vertical);

        //updateTextureWithString("convert to image ok");

        //String fileName = "haarcascade_frontalface_default";
        //String fileName = "lbpcascade_frontalface";
        String fileName = "haarcascade_frontalface_alt2";
        String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
        //if (!File.Exists(filePath))
        {
            //updateTextureWithString("start move cascade xml");
            TextAsset cascadeModel = Resources.Load<TextAsset>(fileName);

#if UNITY_METRO
            UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
            File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
            //updateTextureWithString("File size: " + new FileInfo(filePath).Length);
        }


        using (CascadeClassifier classifier = new CascadeClassifier(filePath))
            using (Mat gray = new Mat())
            {
                CvInvoke.CvtColor(img, gray, ColorConversion.Bgr2Gray);
                //updateTextureWithString("classifier create ok");

                Rectangle[] faces = null;
                try
                {
                    faces = classifier.DetectMultiScale(gray);

                    //updateTextureWithString("face detected");
                    foreach (Rectangle face in faces)
                    {
                        CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
                    }
                }
                catch (Exception e)
                {
                    //updateTextureWithString(e.Message);
                    return;
                }

                //updateTextureWithString(String.Format("{0} face found on image of {1} x {2}", faces.Length, img.Width, img.Height));
            }

        Texture2D texture = TextureConvert.InputArrayToTexture2D(img, FlipType.Vertical);

        this.GetComponent<GUITexture>().texture    = texture;
        this.GetComponent<GUITexture>().pixelInset = new Rect(-img.Width / 2, -img.Height / 2, img.Width, img.Height);
    }
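These examples display their results through the legacy GUITexture component, which was removed in Unity 2019.3. On newer Unity versions the final display step can be done with a UI RawImage instead; a sketch, assuming a RawImage component on the same GameObject:

    // Requires: using UnityEngine.UI;
    void Display(Texture2D texture)
    {
        // Assign the processed texture to a RawImage and size it to the texture's pixels.
        RawImage display = GetComponent<RawImage>();
        display.texture = texture;
        display.SetNativeSize();
    }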