Ejemplo n.º 1
0
    // Flood-fill demo. Currently only loads and displays the source image;
    // the actual floodFill call is left commented out below.
    void OnFloodFill()
    {
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/cv.png", 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGBA);

        // floodFill requires a mask 2 pixels larger than the image in each dimension.
        Mat mask = new Mat();
        mask.create(srcMat.rows() + 2, srcMat.cols() + 2, CvType.CV_8UC1);
        // NOTE(review): the original 60x60 nested loop here had an empty body
        // (the mask write was commented out), so it was removed as dead code.
        // To pre-seed a region of the mask, use mask.put(i, j, 255) or
        // Mat.setTo on a submat instead.

        //OpenCVForUnity.Rect ccomp = new OpenCVForUnity.Rect(20, 20, 30, 30);
        //Point seedPoint = new Point(50, 300);
        //Imgproc.floodFill(srcMat, srcMat, seedPoint, new Scalar(255, 255, 0), ccomp, new Scalar(20, 30, 40, 0), new Scalar(2, 30, 40, 0), 0);

        // Display the (unmodified) image on the UI.
        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());
        Sprite    sp  = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_showImage.sprite         = sp;
        m_showImage.preserveAspect = true;
        Utils.matToTexture2D(srcMat, t2d);
    }
Ejemplo n.º 2
0
        void CreatComponentMarkerLess()
        {
            // Deserialize the saved marker-less object list; nothing to do if absent.
            string json = PlayerPrefs.GetString(PropertiesModel.NameBDMarkerLessPlayerPrefab);
            InformationObjectList informationObjectList = JsonUtility.FromJson <InformationObjectList>(json);

            if (informationObjectList == null)
            {
                return;
            }

            foreach (InformationObject informationObject in informationObjectList.ListInformationObject)
            {
                patternMat = Imgcodecs.imread(informationObject.ImagePathMarkerLess);

                // Skip entries whose image failed to load (empty Mat).
                if (patternMat.total() <= 0)
                {
                    continue;
                }

                pattern = new Pattern();

                // Build and train a detector for this pattern image.
                PatternDetector patternDetector = new PatternDetector(null, null, null, true);
                patternDetector.buildPatternFromImage(patternMat, pattern);
                patternDetector.train(pattern);

                CreateComponent(informationObject, patternDetector);
            }
        }
Ejemplo n.º 3
0
    void Start()
    {
        // Load the sample image and convert BGR -> RGB for display.
        string imagePath = Application.dataPath + "/Textures/lena.jpg";
        srcMat = Imgcodecs.imread(imagePath, 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

        OnOrig(true);
    }
Ejemplo n.º 4
0
    void Start()
    {
        // Load both images as RGB and resize the second to match the first.
        dstMat = new Mat();
        p1Mat  = Imgcodecs.imread(Application.dataPath + "/Textures/1.jpg", 1);
        p2Mat  = Imgcodecs.imread(Application.dataPath + "/Textures/3.jpg", 1);
        Imgproc.cvtColor(p1Mat, p1Mat, Imgproc.COLOR_BGR2RGB);
        Imgproc.cvtColor(p2Mat, p2Mat, Imgproc.COLOR_BGR2RGB);
        Imgproc.resize(p2Mat, p2Mat, new Size(p1Mat.width(), p1Mat.height()));
        Debug.Log(p2Mat);

        // Draw a red line across the first image.
        Point lineStart = new Point(50, 125);
        Point lineEnd   = new Point(p1Mat.size().width - 50, 45);
        int   thickness = 2;

        Imgproc.line(p1Mat, lineStart, lineEnd, new Scalar(255, 0, 0), thickness);

        // Show the result on the UI image.
        Texture2D t2d = new Texture2D(p1Mat.width(), p1Mat.height());
        Utils.matToTexture2D(p1Mat, t2d);
        m_dstImage.sprite = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    }
Ejemplo n.º 5
0
    void Start()
    {
        // Load the group photo (BGR on disk -> RGB for display).
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/team.jpg", 1); //512
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);
        // Grayscale copy for detection.
        grayMat = new Mat();
        Imgproc.cvtColor(srcMat, grayMat, Imgproc.COLOR_RGB2GRAY);

        MatOfRect faces = new MatOfRect();

        haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath("haarcascade_frontalface_alt.xml");
        cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
        // BUGFIX: the constructor never returns null, so the original
        // `cascade != null` check could not detect a failed load;
        // CascadeClassifier.empty() reports whether the cascade data loaded.
        if (!cascade.empty())
        {
            //cascade.detectMultiScale(grayMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(grayMat.cols() * 0.15, grayMat.cols() * 0.15), new Size());
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, new Size(20, 20), new Size());
        }

        // Draw a red rectangle around every detected face.
        OpenCVForUnity.Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++)
        {
            Debug.Log("detect faces " + rects[i]);
            Imgproc.rectangle(srcMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
        }

        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());

        Utils.matToTexture2D(srcMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_srcImage.sprite         = sp;
        m_srcImage.preserveAspect = true;
    }
        // Use this for initialization.
        // Loads a previously captured pattern image (if any) into the pattern
        // preview RawImage, starts the webcam-to-Mat helper, and creates the
        // ORB feature detector.
        void Start()
        {
//            Utils.setDebugMode(true);

            // Try to load the pattern image saved by a previous capture session.
            using (Mat patternMat = Imgcodecs.imread(Application.persistentDataPath + "/patternImg.jpg")) {
                if (patternMat.total() == 0)
                {
                    // No saved pattern: hide the preview widget.
                    patternRawImage.gameObject.SetActive(false);
                }
                else
                {
                    Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_BGR2RGB);

                    Texture2D patternTexture = new Texture2D(patternMat.width(), patternMat.height(), TextureFormat.RGBA32, false);

                    Utils.matToTexture2D(patternMat, patternTexture);

                    patternRawImage.texture = patternTexture;
                    // Scale the preview so it keeps the pattern's aspect ratio.
                    patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);

                    patternRawImage.gameObject.SetActive(true);
                }
            }

            // Start streaming the webcam into a Mat.
            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init();


            // ORB detector limited to 1000 keypoints.
            detector = ORB.create();
            detector.setMaxFeatures(1000);
            keypoints = new MatOfKeyPoint();
        }
Ejemplo n.º 7
0
    void Start()
    {
        // Load the source image and convert BGR -> RGB for display.
        srcMat = Imgcodecs.imread(Application.dataPath + "/Resources/aragaki.jpg", 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

        dstMat = srcMat.clone();

        ///* Affine transform:
        // map three corners of the source image to three new positions.
        Point        srcPoint0 = new Point(0, 0);
        Point        srcPoint1 = new Point(srcMat.width() - 1, 0);
        Point        srcPoint2 = new Point(0, srcMat.height() - 1);
        MatOfPoint2f srcTri    = new MatOfPoint2f(new Point[3] {
            srcPoint0, srcPoint1, srcPoint2
        });
        Point        dstPoint0 = new Point(0, srcMat.height() * 0.33d);
        Point        dstPoint1 = new Point(srcMat.width() * 0.85d, srcMat.height() * 0.25d);
        Point        dstPoint2 = new Point(srcMat.width() * 0.15d, srcMat.height() * 0.7d);
        MatOfPoint2f dstTri    = new MatOfPoint2f(new Point[3] {
            dstPoint0, dstPoint1, dstPoint2
        });

        // 2x3 affine matrix computed from the two point triples.
        warpMat = Imgproc.getAffineTransform(srcTri, dstTri);
        Imgproc.warpAffine(srcMat, dstMat, warpMat, new Size(dstMat.width(), dstMat.height()));
        //*/

        /* Rotation transform (alternative, disabled)
         * //copy the whole canvas
         * dstMat.copyTo(srcMat);
         * Point center = new Point(srcMat.width() / 2, srcMat.height() / 2);
         * double angle = -50.0d;
         * double scale = -0.6d;
         * rotMat = Imgproc.getRotationMatrix2D(center, angle, scale);
         * Imgproc.warpAffine(srcMat, dstMat, rotMat, new Size(dstMat.width(), dstMat.height()));
         */

        /*
         * //Perspective transform (alternative, disabled)
         * Point srcPoint0 = new Point(0, 0);
         * Point srcPoint1 = new Point(srcMat.width() - 1, 0);
         * Point srcPoint2 = new Point(0, srcMat.height() - 1);
         * Point srcPoint3 = new Point(srcMat.width() - 1, srcMat.height() - 1);
         * MatOfPoint2f srcTri = new MatOfPoint2f(new Point[4] { srcPoint0, srcPoint1, srcPoint2, srcPoint3 });
         * Point dstPoint0 = new Point(srcMat.width() * 0.05d, srcMat.height() * 0.33d);
         * Point dstPoint1 = new Point(srcMat.width() * 0.9d, srcMat.height() * 0.25d);
         * Point dstPoint2 = new Point(srcMat.width() * 0.2d, srcMat.height() * 0.7d);
         * Point dstPoint3 = new Point(srcMat.width() * 0.8d, srcMat.height() * 0.9d);
         * MatOfPoint2f dstTri = new MatOfPoint2f(new Point[4] { dstPoint0, dstPoint1, dstPoint2, dstPoint3 });
         * warpMat = Imgproc.getPerspectiveTransform(srcTri, dstTri);
         * Imgproc.warpPerspective(srcMat, dstMat, warpMat, new Size(dstMat.width(), dstMat.height()));
         */

        // Display the warped result.
        Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());

        Utils.matToTexture2D(dstMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        dstImage.sprite         = sp;
        dstImage.preserveAspect = true;
    }
        private void Run()
        {
            // Training set: two grayscale images, labels 0 and 1.
            var trainImages = new List <Mat> ();
            var labelValues = new List <int> ();
            var labels      = new MatOfInt();

            trainImages.Add(Imgcodecs.imread(image_0_filepath, 0));
            trainImages.Add(Imgcodecs.imread(image_1_filepath, 0));
            labelValues.Add(0);
            labelValues.Add(1);
            labels.fromList(labelValues);

            // Probe image to classify.
            Mat testSampleMat   = Imgcodecs.imread(sample_image_filepath, 0);
            int testSampleLabel = 0;

            int[]    predictedLabel      = new int[1];
            double[] predictedConfidence = new double[1];

            // Train an Eigenfaces recognizer and classify the probe.
            BasicFaceRecognizer faceRecognizer = EigenFaceRecognizer.create();
            faceRecognizer.train(trainImages, labels);
            faceRecognizer.predict(testSampleMat, predictedLabel, predictedConfidence);

            Debug.Log("Predicted class: " + predictedLabel [0] + " / " + "Actual class: " + testSampleLabel);
            Debug.Log("Confidence: " + predictedConfidence [0]);

            // Compose the predicted training image and the probe side by side.
            Mat predictedMat = trainImages [predictedLabel [0]];
            Mat baseMat      = new Mat(testSampleMat.rows(), predictedMat.cols() + testSampleMat.cols(), CvType.CV_8UC1);

            predictedMat.copyTo(baseMat.submat(new OpenCVForUnity.CoreModule.Rect(0, 0, predictedMat.cols(), predictedMat.rows())));
            testSampleMat.copyTo(baseMat.submat(new OpenCVForUnity.CoreModule.Rect(predictedMat.cols(), 0, testSampleMat.cols(), testSampleMat.rows())));

            // Annotate both halves.
            Imgproc.putText(baseMat, "Predicted", new Point(10, 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255), 1, Imgproc.LINE_AA, false);
            Imgproc.putText(baseMat, "Confidence:", new Point(5, 25), Imgproc.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255), 1, Imgproc.LINE_AA, false);
            Imgproc.putText(baseMat, "   " + predictedConfidence [0], new Point(5, 33), Imgproc.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255), 1, Imgproc.LINE_AA, false);
            Imgproc.putText(baseMat, "TestSample", new Point(predictedMat.cols() + 10, 15), Imgproc.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255), 1, Imgproc.LINE_AA, false);

            // Show the composite on this GameObject's material.
            Texture2D texture = new Texture2D(baseMat.cols(), baseMat.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(baseMat, texture);
            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }
Ejemplo n.º 9
0
    void Start()
    {
        Mat srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/sample.jpg");

        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGBA);

        // Split into per-channel planes (R, G, B, A after the conversion above).
        List <Mat> channels = new List <Mat>();

        Core.split(srcMat, channels);

        int pixelCount = channels[0].width() * channels[0].height();
        byte[] byteArrayR = new byte[pixelCount];
        byte[] byteArrayG = new byte[pixelCount];
        byte[] byteArrayB = new byte[pixelCount];
        byte[] byteArrayA = new byte[pixelCount];
        Utils.copyFromMat <byte>(channels[0], byteArrayR);
        Utils.copyFromMat <byte>(channels[1], byteArrayG);
        Utils.copyFromMat <byte>(channels[2], byteArrayB);

        // Per-pixel pass: pixels where all three channels reach the threshold
        // become fully transparent, everything else fully opaque.
        int width  = srcMat.width();
        int height = srcMat.height();

        for (int y = 0; y < height; y++)
        {
            int rowStart = width * y;
            for (int x = 0; x < width; x++)
            {
                int idx = rowStart + x;
                bool allAboveThreshold = byteArrayB[idx] >= thread && byteArrayG[idx] >= thread && byteArrayR[idx] >= thread;
                byteArrayA[idx] = allAboveThreshold ? (byte)0 : (byte)255;
            }
        }

        // Write the alpha plane back and rebuild the RGBA image.
        Utils.copyToMat(byteArrayA, channels[3]);
        Core.merge(channels, srcMat);

        // Display on the UI.
        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(srcMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_dstImage.sprite         = sp;
        m_dstImage.preserveAspect = true;
    }
Ejemplo n.º 10
0
    void Start()
    {
        // Load the image as RGB and keep the UI element's aspect ratio in sync.
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/kizuna.jpg", 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);
        aspectRatioFitter.aspectRatio = (float)srcMat.width() / (float)srcMat.height();

        Texture2D tex = new Texture2D(srcMat.width(), srcMat.height());
        Utils.matToTexture2D(srcMat, tex);
        targetImage.texture = tex;
    }
    public static Mat LoadImgToMat(string path)
    {
        // Resolve the path via Utils and load; an empty Mat signals failure.
        Mat img = Imgcodecs.imread(Utils.getFilePath(path));

        if (img.empty())
        {
            Debug.LogError("image is not loaded");
        }
        // The (possibly empty) Mat is returned either way, as before.
        return(img);
    }
Ejemplo n.º 12
0
    void Start()
    {
        // Cascade training data paths.
        haarcascade_frontalface_default_xml_filepath = Application.dataPath + "/Cascades/haarcascade_frontalface_default.xml";
        haarcascade_eye_xml_filepath = Application.dataPath + "/Cascades/haarcascade_eye.xml";

        // Load the source image (BGR on disk -> RGB for display).
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/face.jpg", 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);
        // Grayscale copy for detection.
        grayMat = new Mat();
        Imgproc.cvtColor(srcMat, grayMat, Imgproc.COLOR_RGB2GRAY);

        // Detect all faces in the image.
        MatOfRect         faces   = new MatOfRect();
        CascadeClassifier cascade = new CascadeClassifier(haarcascade_frontalface_default_xml_filepath);

        cascade.detectMultiScale(grayMat, faces, 1.1d, 2, 2, new Size(20, 20), new Size());

        // PERF FIX: create the eye classifier once; it was re-loaded from disk
        // for every detected face.
        CascadeClassifier eyecascade = new CascadeClassifier(haarcascade_eye_xml_filepath);

        OpenCVForUnity.Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++)
        {
            Imgproc.rectangle(srcMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(0, 255, 0, 255), 2); // green

            // Search for eyes only inside the face region.
            // BUGFIX: the ROI must be the face rectangle itself. The original
            // code used Rect(0, 0, x + width, y + height) — a region anchored
            // at the image origin — so the ROI-relative eye coordinates were
            // drawn in the wrong place.
            OpenCVForUnity.Rect faceRect = rects[i];
            Mat       roi_gray_img = new Mat(grayMat, faceRect);
            Mat       roi_img      = new Mat(srcMat, faceRect); // submat view: drawing here writes into srcMat
            MatOfRect eyes         = new MatOfRect();
            eyecascade.detectMultiScale(roi_gray_img, eyes, 1.3d, 5, 2, new Size(20, 20), new Size());

            // BUGFIX: elemSize() is the per-element byte size, not an emptiness
            // test; iterating the (possibly empty) array handles both cases.
            OpenCVForUnity.Rect[] eye_rects = eyes.toArray();
            for (int t = 0; t < eye_rects.Length; t++)
            {
                // Circle centered on the eye rect, radius = half its width.
                Point center = new Point(eye_rects[t].x + eye_rects[t].width / 2, eye_rects[t].y + eye_rects[t].height / 2);
                Imgproc.circle(roi_img, center, eye_rects[t].width / 2, new Scalar(255, 255, 0, 255), 2);
            }
        }

        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());

        Utils.matToTexture2D(srcMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_srcImage.sprite         = sp;
        m_srcImage.preserveAspect = true;
    }
Ejemplo n.º 13
0
    void Start()
    {
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/sample.jpg");
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

        // Split the image into its three channel planes.
        channels = new List <Mat>();
        Core.split(srcMat, channels);

        int pixelCount = channels[0].width() * channels[0].height();
        byteArrayR = new byte[pixelCount];
        byteArrayG = new byte[pixelCount];
        byteArrayB = new byte[pixelCount];
        Utils.copyFromMat <byte>(channels[0], byteArrayR);
        Utils.copyFromMat <byte>(channels[1], byteArrayG);
        Utils.copyFromMat <byte>(channels[2], byteArrayB);

        // Invert every channel value (negative image).
        int width  = srcMat.width();
        int height = srcMat.height();

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                int idx = x + width * y;
                byteArrayR[idx] = (byte)(255 - byteArrayR[idx]);
                byteArrayG[idx] = (byte)(255 - byteArrayG[idx]);
                byteArrayB[idx] = (byte)(255 - byteArrayB[idx]);
            }
        }

        // Copy the planes back and rebuild the color image.
        Utils.copyToMat(byteArrayR, channels[0]);
        Utils.copyToMat(byteArrayG, channels[1]);
        Utils.copyToMat(byteArrayB, channels[2]);
        Core.merge(channels, srcMat);

        // Display via uGUI.
        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());
        Sprite    sp  = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_showImage.sprite = sp;
        Utils.matToTexture2D(srcMat, t2d);
    }
Ejemplo n.º 14
0
        /// <summary>
        /// Raises the load button event.
        /// </summary>
        /// <summary>
        /// Raises the load button event.
        /// Recreates the face recognizer, loads previously saved training data
        /// and preprocessed face images, then switches to recognition mode.
        /// </summary>
        public void OnLoadButton()
        {
            Debug.Log("User clicked [Load] button.");

            // Restart everything!
            dispose();

            // Pick the recognizer implementation configured for this session.
            if (facerecAlgorithm == "FaceRecognizer.Fisherfaces")
            {
                model = Face.createFisherFaceRecognizer();
            }
            else if (facerecAlgorithm == "FaceRecognizer.Eigenfaces")
            {
                model = Face.createEigenFaceRecognizer();
            }

            if (model == null)
            {
                Debug.LogError("ERROR: The FaceRecognizer algorithm [" + facerecAlgorithm + "] is not available in your version of OpenCV. Please update to OpenCV v2.4.1 or newer.");
                m_mode = MODES.MODE_DETECTION;
                return;
            }

            // load the train data.
            model.load(Application.temporaryCachePath + "/traindata.yml");

            // Highest label in the model == number of saved persons minus one.
            int maxLabel = (int)Core.minMaxLoc(model.getLabels()).maxVal;

            if (maxLabel <= 0)
            {
                // Nothing usable was loaded: discard the model and fall back
                // to plain detection mode.
                Debug.Log("load failure.");
                model.Dispose();
                model  = null;
                m_mode = MODES.MODE_DETECTION;
                return;
            }

            // Restore the save data.
            m_numPersons = maxLabel + 1;
            for (int i = 0; i < m_numPersons; ++i)
            {
                m_latestFaces.Add(i);
                preprocessedFaces.Add(Imgcodecs.imread(Application.temporaryCachePath + "/preprocessedface" + i + ".jpg", 0));
                if (preprocessedFaces [i].empty())
                {
                    // Missing face image: substitute a mid-gray placeholder.
                    preprocessedFaces [i] = new Mat(faceHeight, faceWidth, CvType.CV_8UC1, new Scalar(128));
                }
                faceLabels.Add(i);
            }

            // go to the recognition mode!
            m_mode = MODES.MODE_RECOGNITION;
        }
Ejemplo n.º 15
0
    void Update()
    {
        // Generate and display one synthetic frame; runs until a shot is taken.
        if (shoot == false)
        {
            genV.next();
            // Apply the generated lighting parameters.
            Global.light.transform.rotation = Quaternion.Euler(genV.light_xAngle, genV.light_yAngle, 0f);
            Global.light.GetComponent <Light> ().intensity = genV.light_intensity;
            // Load the background image chosen by the generator.
            string imgpath = Global.backImgDir + genV.backgroundIndex + ".jpg";
            Mat    imgMat2 = Imgcodecs.imread(imgpath);
            Mat    imgMat = new Mat();
            int    new_w, new_h = 3000;
            double scale = new_h / Convert.ToDouble(imgMat2.rows());

            // Scale width proportionally, clamped to the screen's aspect ratio.
            new_w = Math.Min(Convert.ToInt32(imgMat2.cols() * scale)
                             , Convert.ToInt32(new_h / (float)Screen.height * Screen.width) - 100);
            imgNowWidth = new_w;
            Imgproc.resize(imgMat2, imgMat, new Size(Convert.ToDouble(new_w), Convert.ToDouble(new_h)));
            // Swap the first and third channel planes (BGR -> RGB).
            List <Mat> channels = new List <Mat> ();
            OpenCVForUnity.Core.split(imgMat, channels);
            Mat a = new Mat();
            a            = channels [0];
            channels [0] = channels [2];
            channels [2] = a;
            OpenCVForUnity.Core.merge(channels, imgMat);

            SpriteRenderer spr = background.GetComponent <SpriteRenderer> ();

            Texture2D texture2d = new Texture2D(new_w, new_h);

            Utils.matToTexture2D(imgMat, texture2d);
            Sprite sp = Sprite.Create(texture2d, new UnityEngine.Rect(0, 0, new_w, new_h), new Vector2(0.5f, 0.5f));               // pivot 0.5f centers the sprite
            // Free the previous frame's sprite/texture to avoid leaking memory;
            // skipped on the very first frame (nothing to destroy yet).
            if (shouldDestroy)
            {
                DestroyImmediate(spr.sprite.texture, true);
                DestroyImmediate(spr.sprite, true);
            }
            else
            {
                shouldDestroy = true;
            }
            spr.sprite = sp;

            // Screen-space extents of the background, published for other code.
            int imgWW = Convert.ToInt32(Screen.height / Convert.ToDouble(new_h) * new_w);
            Global.imgOnScrL = Convert.ToInt32((Screen.width - imgWW) / 2f);
            Global.imgOnScrR = imgWW + Global.imgOnScrL;

            genV.setImgSize(imgWW, Screen.height);
            genV.genPersons();
            genV.print();
            shoot = true;
        }
    }
Ejemplo n.º 16
0
    void Start()
    {
        // Load the source directly as a single-channel grayscale image.
        grayMat = Imgcodecs.imread(Application.dataPath + "/Textures/kizuna.jpg", 0);

        // Render each gradient operator into its own UI image.
        sobelImage.sprite   = SobelGradient();
        laplaceImage.sprite = LaplaceGradient();
        cannyImage.sprite   = CannyGradient();

        sobelImage.preserveAspect   = true;
        laplaceImage.preserveAspect = true;
        cannyImage.preserveAspect   = true;
    }
        private bool InitializeImagesInputMode()
        {
            // Tear down any previous calibration session first.
            if (isInitialized)
            {
                DisposeCalibraton();
            }

            // Validate configuration and inputs with early returns.
            if (String.IsNullOrEmpty(calibrationImagesDirectory))
            {
                Debug.LogWarning("When using the images input mode, please set a calibration images directory path.");
                return(false);
            }

            string dirPath = Path.Combine(Application.streamingAssetsPath, calibrationImagesDirectory);
            if (!Directory.Exists(dirPath))
            {
                Debug.LogWarning("The directory does not exist.");
                return(false);
            }

            string[] imageFiles = GetImageFilesInDirectory(dirPath);
            if (imageFiles.Length < 1)
            {
                Debug.LogWarning("The image file does not exist.");
                return(false);
            }

            // Build a StreamingAssets-relative path for the first image file.
            Uri    rootUri      = new Uri(Application.streamingAssetsPath + System.IO.Path.AltDirectorySeparatorChar);
            Uri    imageUri     = new Uri(imageFiles[0]);
            string relativePath = rootUri.MakeRelativeUri(imageUri).ToString();

            using (Mat gray = Imgcodecs.imread(Utils.getFilePath(relativePath), Imgcodecs.IMREAD_GRAYSCALE))
            {
                if (gray.total() == 0)
                {
                    Debug.LogWarning("Invalid image file.");
                    return(false);
                }

                // Work buffers for drawing and display conversion.
                using (Mat bgr = new Mat(gray.size(), CvType.CV_8UC3))
                using (Mat bgra = new Mat(gray.size(), CvType.CV_8UC4))
                {
                    InitializeCalibraton(gray);

                    DrawFrame(gray, bgr);
                    Imgproc.cvtColor(bgr, bgra, Imgproc.COLOR_BGR2RGBA);
                    Utils.fastMatToTexture2D(bgra, texture);
                }
            }
            return(true);
        }
Ejemplo n.º 18
0
    void Start()
    {
        grayMat    = Imgcodecs.imread(Application.dataPath + "/Textures/kizuna.jpg", 0); // load as grayscale
        outputHist = new Mat();
        images     = new List <Mat>();
        Core.split(grayMat, images);

        // Histogram parameters.
        Mat        mask     = new Mat();                              // empty mask = use all pixels
        MatOfInt   channels = new MatOfInt(new int[] { 1 });
        MatOfInt   histSize = new MatOfInt(new int[] { 256 });        // 256 bins
        MatOfFloat ranges   = new MatOfFloat(new float[] { 0, 255 });
        bool       accum    = false;                                  // do not accumulate across calls

        Imgproc.calcHist(images, channels, mask, outputHist, histSize, ranges, accum);
        Debug.Log(outputHist);

        // Canvas for the plot: single channel, filled white (255).
        int scale   = 1;
        Mat histPic = new Mat(256, 256 * scale, CvType.CV_8U, new Scalar(255));

        // Min/max of the source image, used to scale the vertical axis
        // (deliberately computed on grayMat, not the histogram Mat).
        Core.MinMaxLocResult res = Core.minMaxLoc(grayMat);
        double minValue          = res.minVal;
        double maxValue          = res.maxVal;

        Debug.Log("min:" + minValue + ", max:" + maxValue);

        // Vertical scaling factor.
        double rate = (256 / maxValue) * 1d;

        // PERF FIX: copy the image bytes once. The original allocated and
        // copied the entire image on every one of the 256 loop iterations.
        byte[] byteArray = new byte[grayMat.width() * grayMat.height()];
        Utils.copyFromMat <byte>(grayMat, byteArray);

        for (int i = 0; i < 256; i++)
        {
            // NOTE(review): this reads the i-th *pixel* of the image, not the
            // i-th histogram bin; to plot the actual histogram, read the bin
            // values from outputHist instead (cf. the at<float>(i) hint in the
            // original). Kept as-is to preserve the existing behavior.
            float value = (float)byteArray[i];
            // Draw a vertical bar for this column (0 = black).
            Imgproc.line(histPic, new Point(i * scale, 256), new Point(i * scale, 256 - value * rate), new Scalar(0));
        }

        Texture2D t2d = new Texture2D(histPic.width(), histPic.height());
        Sprite    sp  = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_showImage.sprite         = sp;
        m_showImage.preserveAspect = true;
        Utils.matToTexture2D(histPic, t2d);
    }
    protected virtual Mat LoadImage(string imageFilepath)
    {
        // Load the image; fall back to a 640x480 black image when loading fails.
        Mat img = Imgcodecs.imread(imageFilepath);
        if (img.empty())
        {
            Debug.LogError(imageFilepath + " is not loaded.");
            img = new Mat(480, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
        }

        // Disk images are BGR; convert for display.
        Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
        return(img);
    }
Ejemplo n.º 20
0
    void Start()
    {
        patch_size       = 0.01f;
        controllerTarget = GameObject.Find("ControllerTarget");
        baseTarget       = GameObject.Find("BaseTarget");
        meshFilter       = GetComponent <MeshFilter>();

        // Load the height map and reduce it to a single gray channel.
        Mat heightMapImg = Imgcodecs.imread("Assets/Textures/HeightMaps/mars_height_2.jpg");
        heightmap = new Mat();
        Imgproc.cvtColor(heightMapImg, heightmap, Imgproc.COLOR_RGB2GRAY);
    }
Ejemplo n.º 21
0
    public NoodleTextureCreator()
    {
        // Source images for each noodle texture variant.
        Mat hotSrc    = Imgcodecs.imread(Utils.getFilePath("Noodle Images/hot-spice.jpg"));
        Mat creamySrc = Imgcodecs.imread(Utils.getFilePath("Noodle Images/creamy.png"));
        Mat thickSrc  = Imgcodecs.imread(Utils.getFilePath("Noodle Images/thick2.jpg"));

        // Destination mats, all 3-channel 8-bit.
        mat_Hot    = new Mat(hotSrc.size(), CvType.CV_8UC3);
        mat_Creamy = new Mat(creamySrc.size(), CvType.CV_8UC3);
        mat_Thick  = new Mat(thickSrc.size(), CvType.CV_8UC3);

        Imgproc.cvtColor(hotSrc, mat_Hot, Imgproc.COLOR_BGR2RGB);
        // NOTE(review): BGRA2RGB assumes the PNG decoded with 4 channels —
        // confirm, since imread defaults to 3-channel loading.
        Imgproc.cvtColor(creamySrc, mat_Creamy, Imgproc.COLOR_BGRA2RGB);
        Imgproc.cvtColor(thickSrc, mat_Thick, Imgproc.COLOR_BGR2RGB);
    }
Ejemplo n.º 22
0
    // Coroutine: runs camera calibration over all images in `directory`,
    // optionally drawing progress and auto-saving the result.
    IEnumerator CalibrateRoutine()
    {
        // Abort when the source directory is missing.
        // (CLEANUP: removed a stray empty statement `;` that followed this
        // block and the one below — dead code.)
        if (!Directory.Exists(directory))
        {
            yield break;
        }

        // Keep only files whose extension is in the target list.
        var files = Directory.GetFiles(directory);

        files = files.Where(e => targetExtensions.Contains(Path.GetExtension(e).ToLower())).ToArray();
        if (files.Length < 1)
        {
            yield break;
        }

        calibrator.Setup();
        for (var i = 0; i < files.Length; i++)
        {
            using (Mat gray = Imgcodecs.imread(files[i], Imgcodecs.IMREAD_GRAYSCALE))
            {
                // Lazily initialize display buffers from the first image.
                if (i == 0)
                {
                    Init(gray);
                }
                calibrator.Calibrate(gray);
                if (draw)
                {
                    Imgproc.cvtColor(gray, rgbMat, Imgproc.COLOR_GRAY2RGB);
                    calibrator.Draw(gray, rgbMat);
                    Utils.matToTexture2D(rgbMat, texture);
                    renderer.material.mainTexture = texture;
                }

                print("progress : " + (i + 1) + " / " + files.Length);
                yield return(new WaitForSeconds(interval));
            }
        }
        if (autoSave)
        {
            calibrator.Save(IOHandler.IntoStreamingAssets(fileName));
        }

        calibrator.Clear();

        print("Complete Calibration");
        yield break;
    }
Ejemplo n.º 23
0
    private void Run()
    {
        // Training set: two grayscale images, labels 0 (positive) and 1 (negative).
        var trainImages = new List <Mat>();
        var labelValues = new List <int>();
        var labels      = new MatOfInt();

        trainImages.Add(Imgcodecs.imread(facerec_0_bmp_filepath, 0));
        trainImages.Add(Imgcodecs.imread(facerec_1_bmp_filepath, 0));
        labelValues.Add(0);
        labelValues.Add(1);
        labels.fromList(labelValues);

        // Probe image to classify.
        Mat testSampleMat   = Imgcodecs.imread(facerec_sample_bmp_filepath, 0);
        int testSampleLabel = 0;

        int[]    predictedLabel      = new int[1];
        double[] predictedConfidence = new double[1];

        BasicFaceRecognizer faceRecognizer = EigenFaceRecognizer.create();

        // update() extends a previously loaded model instead of clearing it
        // (train() would reset the model first).
        faceRecognizer.update(trainImages, labels);
        faceRecognizer.predict(testSampleMat, predictedLabel, predictedConfidence);

        Debug.Log("Predicted class: " + predictedLabel[0] + " / " + "Actual class: " + testSampleLabel);
        Debug.Log("Confidence: " + predictedConfidence[0]);

        // Compose the predicted training image and the probe side by side.
        Mat predictedMat = trainImages[predictedLabel[0]];
        Mat baseMat      = new Mat(testSampleMat.rows(), predictedMat.cols() + testSampleMat.cols(), CvType.CV_8UC1);

        predictedMat.copyTo(baseMat.submat(new OpenCVForUnity.Rect(0, 0, predictedMat.cols(), predictedMat.rows())));
        testSampleMat.copyTo(baseMat.submat(new OpenCVForUnity.Rect(predictedMat.cols(), 0, testSampleMat.cols(), testSampleMat.rows())));

        // Annotate both halves.
        Imgproc.putText(baseMat, "Predicted", new Point(10, 15), Core.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255), 1, Imgproc.LINE_AA, false);
        Imgproc.putText(baseMat, "Confidence:", new Point(5, 25), Core.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255), 1, Imgproc.LINE_AA, false);
        Imgproc.putText(baseMat, "   " + predictedConfidence[0], new Point(5, 33), Core.FONT_HERSHEY_SIMPLEX, 0.2, new Scalar(255), 1, Imgproc.LINE_AA, false);
        Imgproc.putText(baseMat, "TestSample", new Point(predictedMat.cols() + 10, 15), Core.FONT_HERSHEY_SIMPLEX, 0.4, new Scalar(255), 1, Imgproc.LINE_AA, false);

        // Show the composite on the UI.
        Texture2D t2d = new Texture2D(baseMat.width(), baseMat.height());
        Utils.matToTexture2D(baseMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_srcImage.sprite         = sp;
        m_srcImage.preserveAspect = true;
    }
Ejemplo n.º 24
0
    // Start is called before the first frame update.
    // Loads the Lena sample image, blurs it, and saves the result as a PNG.
    void Start()
    {
        var sourceMat = Imgcodecs.imread(Application.dataPath + "/Resources/Splites/Lena.jpg");

        // Apply the Gaussian blur in place.
        Process.Gaussian(sourceMat);

        // Copy the processed Mat into a texture of matching dimensions.
        var outputTexture = new Texture2D(sourceMat.cols(), sourceMat.rows());
        Utils.matToTexture2D(sourceMat, outputTexture);

        // Encode to PNG and persist to disk at the configured path.
        byte[] pngBytes = outputTexture.EncodeToPNG();
        File.WriteAllBytes(path, pngBytes);
    }
Ejemplo n.º 25
0
    // This implementation needs to be replaced to much sophisticated way :)
    // Loads matched quadruples of LUT textures (RGB, RGB-2, HSV, HSV-2) from
    // four directories and registers one NamedLUTGPU entry per quadruple.
    // The four directories must contain the same number of files.
    void LoadLUTs(string path, string path2, string path3, string path4)
    {
        string[] LUTPaths     = Directory.GetFiles(path);
        string[] LUT2Paths    = Directory.GetFiles(path2);
        string[] hsvLUTPaths  = Directory.GetFiles(path3);
        string[] hsvLUT2Paths = Directory.GetFiles(path4);

        // FIX: Directory.GetFiles does not guarantee any ordering, but this
        // method pairs the four arrays by index — sort them so the pairing is
        // deterministic and filename-aligned across directories.
        System.Array.Sort(LUTPaths);
        System.Array.Sort(LUT2Paths);
        System.Array.Sort(hsvLUTPaths);
        System.Array.Sort(hsvLUT2Paths);

        // If the number of LUTs is not matched, loading is aborted.
        // FIX: report the abort instead of failing silently.
        if (LUTPaths.Length != LUT2Paths.Length ||
            LUTPaths.Length != hsvLUTPaths.Length ||
            LUTPaths.Length != hsvLUT2Paths.Length)
        {
            Debug.LogWarning("LoadLUTs: LUT directories contain differing file counts; load aborted");
            return;
        }

        int index_counter = 0;

        foreach (string lutPath in LUTPaths)
        {
            var lutPath2    = LUT2Paths[index_counter];
            var hsvLutPath  = hsvLUTPaths[index_counter];
            var hsvLutPath2 = hsvLUT2Paths[index_counter];
            var newLUTGPU   = new NamedLUTGPU()
            {
                name     = Path.GetFileNameWithoutExtension(lutPath),
                name2    = Path.GetFileNameWithoutExtension(lutPath2),
                hsvName  = Path.GetFileNameWithoutExtension(hsvLutPath),
                hsvName2 = Path.GetFileNameWithoutExtension(hsvLutPath2),
                // OpenCV dependency: decode each LUT image into an RGB24 texture.
                LUT           = (Texture2D)Util.toTexture(Imgcodecs.imread(lutPath), TextureFormat.RGB24),
                LUT2          = (Texture2D)Util.toTexture(Imgcodecs.imread(lutPath2), TextureFormat.RGB24),
                hsvLUT        = (Texture2D)Util.toTexture(Imgcodecs.imread(hsvLutPath), TextureFormat.RGB24),
                hsvLUT2       = (Texture2D)Util.toTexture(Imgcodecs.imread(hsvLutPath2), TextureFormat.RGB24),
                computeShader = lutProcessor,
                index         = index_counter++
            };

            // Clamp wrap mode so LUT sampling never wraps around at the edges.
            newLUTGPU.LUT.wrapMode = TextureWrapMode.Clamp;
            newLUTGPU.LUT.Apply();
            newLUTGPU.LUT2.wrapMode = TextureWrapMode.Clamp;
            newLUTGPU.LUT2.Apply();
            newLUTGPU.hsvLUT.wrapMode = TextureWrapMode.Clamp;
            newLUTGPU.hsvLUT.Apply();
            newLUTGPU.hsvLUT2.wrapMode = TextureWrapMode.Clamp;
            newLUTGPU.hsvLUT2.Apply();
            Luts.Add(newLUTGPU);
        }
    }
    /// <summary>
    /// Loads an image file into a newly created Texture2D of the given format.
    /// Returns a small placeholder texture when the image cannot be read.
    /// </summary>
    public static Texture2D LoadImgToTexture(string path, TextureFormat textureFormat)
    {
        Mat img = Imgcodecs.imread(Utils.getFilePath(path));

        if (img.empty())
        {
            Debug.LogError("image is not loaded");
            // FIX: the Mat was leaked in this branch, and an empty Mat has
            // 0x0 dimensions, which is not a valid texture size — dispose it
            // and return a minimal valid placeholder instead.
            img.Dispose();
            var tex = new Texture2D(2, 2, textureFormat, false);
            return(tex);
        }
        var imageTex = new Texture2D(img.cols(), img.rows(), textureFormat, false);

        Utils.matToTexture2D(img, imageTex);
        img.Dispose();
        return(imageTex);
    }
Ejemplo n.º 27
0
    /// <summary>
    /// Loads a texture file from Assets/ and converts it to an RGBA Mat.
    /// Returns null when the file cannot be read.
    /// </summary>
    public static Mat LoadRGBATexture(string textureFilename)
    {
        string fn      = "Assets/" + textureFilename;
        // FIX: imread with default flags always returns 3-channel BGR (any
        // alpha channel is dropped), so the original COLOR_BGRA2RGBA call
        // would assert inside cvtColor. IMREAD_UNCHANGED preserves the file's
        // actual channel layout.
        Mat    loadMat = Imgcodecs.imread(fn, Imgcodecs.IMREAD_UNCHANGED);
        Mat    result  = new Mat();

        if (loadMat.width() > 0)
        {
            // Pick the conversion code that matches the decoded channel count;
            // cvtColor asserts on a mismatch.
            int code;
            switch (loadMat.channels())
            {
                case 4:
                    code = Imgproc.COLOR_BGRA2RGBA;
                    break;
                case 1:
                    code = Imgproc.COLOR_GRAY2RGBA;
                    break;
                default:
                    code = Imgproc.COLOR_BGR2RGBA;
                    break;
            }
            Imgproc.cvtColor(loadMat, result, code);
            loadMat.Dispose();
        }
        else
        {
            // FIX: dispose both Mats on failure instead of leaking them.
            loadMat.Dispose();
            result.Dispose();
            return(null);
        }
        return(result);
    }
    /// <summary>
    /// Loads an image file into a new RGB24 Texture2D.
    /// Returns a small placeholder texture when the image cannot be read.
    /// </summary>
    private Texture2D LoadTexture(string path)
    {
        Mat img = Imgcodecs.imread(Utils.getFilePath(path));

        //Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
        if (img.empty())
        {
            Debug.LogError("image is not loaded");
            // FIX: an empty Mat has 0x0 dimensions, which is not a valid
            // texture size — dispose the Mat and return a minimal placeholder.
            img.Dispose();
            var tex = new Texture2D(2, 2, TextureFormat.RGB24, false);
            return(tex);
        }
        var imageTex = new Texture2D(img.width(), img.height(), TextureFormat.RGB24, false);

        Utils.matToTexture2D(img, imageTex);
        img.Dispose(); // FIX: the Mat was never disposed (native memory leak)
        return(imageTex);
    }
Ejemplo n.º 29
0
    // Flood-fills the region connected to (10,10) with yellow and shows the
    // filled image in dstImage.
    void Start()
    {
        srcMat = Imgcodecs.imread(Application.dataPath + "/Textures/kizuna.jpg", 1);
        Imgproc.cvtColor(srcMat, srcMat, Imgproc.COLOR_BGR2RGB);

        // FIX: floodFill requires a zero-initialized CV_8UC1 mask exactly
        // 2 pixels wider and taller than the image; the original arbitrary
        // 100x100 uninitialized Mat makes floodFill fail.
        dstMat = Mat.zeros(srcMat.rows() + 2, srcMat.cols() + 2, CvType.CV_8UC1);
        //dstMat = srcMat.clone();
        // The fill is written into srcMat itself; dstMat receives the fill mask.
        Imgproc.floodFill(srcMat, dstMat, new Point(10, 10), new Scalar(255, 255, 0, 255));

        // FIX: display the filled image (srcMat), not the mask — the mask is
        // a near-black single-channel image and was never the intended output.
        Texture2D t2d = new Texture2D(srcMat.width(), srcMat.height());

        Utils.matToTexture2D(srcMat, t2d);
        Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        dstImage.sprite         = sp;
        dstImage.preserveAspect = true;
    }
Ejemplo n.º 30
0
    // Applies a custom color map (loaded from an image strip) to srcMat via
    // a lookup table and shows the result in m_imageList[0].
    void LutColorMap()
    {
        Mat lut = Imgcodecs.imread(Application.dataPath + "/Textures/colorscale_hot.jpg");

        // FIX: Core.LUT requires a lookup table with exactly 256 entries;
        // the raw color-scale image does not satisfy this, which is why the
        // original call had no effect. Resize the strip to a 1x256 table.
        Imgproc.resize(lut, lut, new Size(256, 1));
        // Match srcMat's channel order — assumes srcMat is RGB; TODO confirm
        // against where srcMat is loaded.
        Imgproc.cvtColor(lut, lut, Imgproc.COLOR_BGR2RGB);

        // FIX: Core.LUT allocates/fills dstMat itself; the original
        // create() + cvtColor on uninitialized data was dead work.
        Mat dstMat = new Mat();
        Core.LUT(srcMat, lut, dstMat);

        Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());
        Sprite    sp  = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

        m_imageList[0].sprite         = sp;
        m_imageList[0].preserveAspect = true;
        Utils.matToTexture2D(dstMat, t2d);
    }