// Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (net == null)
                {
                    Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);


                    // Create a 4D blob from a frame.
                    Size inpSize = new Size(inpWidth > 0 ? inpWidth : bgrMat.cols(),
                                            inpHeight > 0 ? inpHeight : bgrMat.rows());
                    Mat blob = Dnn.blobFromImage(bgrMat, scale, inpSize, mean, swapRB, false);


                    // Run a model.
                    net.setInput(blob);

                    if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)       // Faster-RCNN or R-FCN
                    {
                        Imgproc.resize(bgrMat, bgrMat, inpSize);
                        Mat imInfo = new Mat(1, 3, CvType.CV_32FC1);
                        imInfo.put(0, 0, new float[] {
                            (float)inpSize.height,
                            (float)inpSize.width,
                            1.6f
                        });
                        net.setInput(imInfo, "im_info");
                    }


                    TickMeter tm = new TickMeter();
                    tm.start();

                    List <Mat> outs = new List <Mat> ();
                    net.forward(outs, outBlobNames);

                    tm.stop();
//                    Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());


                    postprocess(rgbaMat, outs, net);

                    for (int i = 0; i < outs.Count; i++)
                    {
                        outs [i].Dispose();
                    }
                    blob.Dispose();
                }

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
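        // The postprocess() helper called above is not shown in these snippets.
        // A minimal sketch, assuming a single SSD-style output blob whose rows
        // are (imageId, classId, confidence, left, top, right, bottom) with the
        // box coordinates normalized to [0, 1]; other output layouts (e.g.
        // YOLO region layers) would need their own decoding.
        void postprocessSketch(Mat frame, List <Mat> outs, Net net)
        {
            Mat dets = outs [0].reshape(1, (int)outs [0].total() / 7);
            float[] data = new float[7];
            for (int i = 0; i < dets.rows(); i++)
            {
                dets.get(i, 0, data);
                float confidence = data [2];
                if (confidence > 0.5f)
                {
                    Point p1 = new Point(data [3] * frame.cols(), data [4] * frame.rows());
                    Point p2 = new Point(data [5] * frame.cols(), data [6] * frame.rows());
                    Imgproc.rectangle(frame, p1, p2, new Scalar(0, 255, 0, 255), 2);
                }
            }
            dets.Dispose();
        }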
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (net.empty() || classes == null)
                {
                    Imgproc.putText(rgbaMat, "model file or class names list file is not loaded.", new Point(5, rgbaMat.rows() - 50), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "The model and class names list can be downloaded here:", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    blob = Dnn.blobFromImage(rgbaMat, 1, new Size(224, 224), new Scalar(104, 117, 123), false);
                    net.setInput(blob);

                    Mat prob = net.forward();

                    Core.MinMaxLocResult minmax = Core.minMaxLoc(prob.reshape(1, 1));
//                Debug.Log ("Best match " + (int)minmax.maxLoc.x);
//                Debug.Log ("Best match class " + classes [(int)minmax.maxLoc.x]);
//                Debug.Log ("Probability: " + minmax.maxVal * 100 + "%");

                    prob.Dispose();

                    Imgproc.putText(rgbaMat, "Best match class " + classes [(int)minmax.maxLoc.x], new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
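        // Note: minmax.maxVal above is a real probability only if the network's
        // final layer is a softmax. A minimal sketch for normalizing a raw-score
        // row on the C# side, assuming prob is a single-channel 1xN CV_32F Mat
        // (the method name is illustrative, not part of the original sample):
        float SoftmaxOfMax(Mat prob)
        {
            Core.MinMaxLocResult mm = Core.minMaxLoc(prob);
            Mat expMat = new Mat();
            // subtract the max first for numerical stability, then exponentiate
            Core.subtract(prob, new Scalar(mm.maxVal), expMat);
            Core.exp(expMat, expMat);
            Scalar sum = Core.sumElems(expMat);
            expMat.Dispose();
            // exp(max - max) == 1, so the softmax of the best class is 1 / sum
            return (float)(1.0 / sum.val [0]);
        }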
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (net == null)
                {
                    Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

                    blob = Dnn.blobFromImage(bgrMat, inScaleFactor, new Size(inWidth, inHeight), new Scalar(meanVal, meanVal, meanVal), false, false);
                    net.setInput(blob);

                    Mat prob = net.forward();
                    prob = prob.reshape(1, (int)prob.total() / 7);


                    float[] data = new float[7];

                    float confidenceThreshold = 0.2f;
                    for (int i = 0; i < prob.rows(); i++)
                    {
                        prob.get(i, 0, data);

                        float confidence = data [2];

                        if (confidence > confidenceThreshold)
                        {
                            int class_id = (int)(data [1]);

                            float left   = data [3] * rgbaMat.cols();
                            float top    = data [4] * rgbaMat.rows();
                            float right  = data [5] * rgbaMat.cols();
                            float bottom = data [6] * rgbaMat.rows();

                            Imgproc.rectangle(rgbaMat, new Point(left, top), new Point(right, bottom),
                                              new Scalar(0, 255, 0, 255), 2);
                            string label     = classNames [class_id] + ": " + confidence;
                            int[]  baseLine  = new int[1];
                            Size   labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                            Imgproc.rectangle(rgbaMat, new Point(left, top),
                                              new Point(left + labelSize.width, top + labelSize.height + baseLine [0]),
                                              new Scalar(255, 255, 255, 255), Core.FILLED);
                            Imgproc.putText(rgbaMat, label, new Point(left, top + labelSize.height),
                                            Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
                        }
                    }

                    prob.Dispose();
                }

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
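        // The classNames list used above is loaded elsewhere; a minimal sketch
        // of such a helper, assuming one class label per line in a plain text
        // file (the method name is illustrative):
        private List <string> readClassNamesSketch(string filename)
        {
            List <string> names = new List <string>();
            try
            {
                using (System.IO.StreamReader reader = new System.IO.StreamReader(filename))
                {
                    string line;
                    while ((line = reader.ReadLine()) != null)
                    {
                        names.Add(line.Trim());
                    }
                }
            }
            catch (System.Exception ex)
            {
                Debug.LogError(ex.Message);
                return null;
            }
            return names;
        }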
    public List <KeyValuePair <string, float> > RunInference(Texture2D imageTex)
    {
        //Run();

        //clear lists
        modelOutput.Clear();
        detectionBoxes.Clear();

        //get renderer
        var rend = gameObject.GetComponent <Renderer>();

        Mat img = new Mat(imageTex.height, imageTex.width, CvType.CV_8UC3);

        Utils.texture2DToMat(imageTex, img);
        if (img.empty())
        {
            Debug.LogError("texture2D is not loaded.");
            img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
        }

        if (rend != null)
        {
            GenericUtils.AdjustImageScale(img, this.gameObject);
        }

        // Create a 4D blob from a frame.
        Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                inpHeight > 0 ? inpHeight : img.rows());
        Mat blob = Dnn.blobFromImage(img, scale, inpSize, mean, swapRB, false);

        // Run a model.
        net.setInput(blob);

        List <Mat> outs = new List <Mat>();

        net.forward(outs, outBlobNames);

        // network returns results in Mat format, therefore postprocessing is required.
        postprocess(img, outs, net);

        for (int i = 0; i < outs.Count; i++)
        {
            outs[i].Dispose();
        }
        blob.Dispose();
        //net.Dispose();

        Utils.setDebugMode(false);

        //display image in scene if renderer exists.
        //note: img must still be alive here, so it is disposed after this block.
        if (rend != null && displayBB)
        {
            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(img, texture);
            rend.material.mainTexture = texture;
        }

        img.Dispose();

        return(modelOutput);
    }
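    // Hypothetical usage from another script, assuming a field referencing the
    // component that defines RunInference (names here are illustrative):
    //   List <KeyValuePair <string, float> > results = detector.RunInference(tex);
    //   foreach (var r in results) Debug.Log(r.Key + " : " + r.Value);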
Example 5
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (net.empty() || classes == null)
                {
                    Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

                    blob = Dnn.blobFromImage(bgrMat, 1, new Size(224, 224), new Scalar(104, 117, 123), false, true);
                    net.setInput(blob);

                    Mat prob = net.forward();

                    Core.MinMaxLocResult minmax = Core.minMaxLoc(prob.reshape(1, 1));
//                Debug.Log ("Best match " + (int)minmax.maxLoc.x);
//                Debug.Log ("Best match class " + classes [(int)minmax.maxLoc.x]);
//                Debug.Log ("Probability: " + minmax.maxVal * 100 + "%");

                    prob.Dispose();

                    Imgproc.putText(rgbaMat, "Best match class " + classes [(int)minmax.maxLoc.x], new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
    protected virtual Mat PreProcess(Mat img)
    {
        // Create a 4D blob from a frame.
        Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                inpHeight > 0 ? inpHeight : img.rows());
        Mat blob = Dnn.blobFromImage(img, scale, inpSize, mean, swapRB, false);

        return(blob);
    }
    private void TFDetect(Mat image, int imgWidth, int imgHeight, ref BoxOutline outline)
    {
        if (image == null)
        {
            Debug.Log("unable to find colors");
            return;
        }

        var blob = Dnn.blobFromImage(image, 1, new Size(300, 300), new Scalar(0, 0, 0), true, false);

        tfDetector.setInput(blob);
        Mat   prob     = tfDetector.forward();
        Mat   newMat   = prob.reshape(1, (int)prob.total() / prob.size(3));
        float maxScore = 0;
        int   scoreInd = 0;

        for (int i = 0; i < newMat.rows(); i++)
        {
            var score = (float)newMat.get(i, 2)[0];
            if (score > maxScore)
            {
                maxScore = score;
                scoreInd = i;
            }
        }
        //Debug.Log(maxScore);
        if (maxScore > 0.7)
        {
            float left   = (float)(newMat.get(scoreInd, 3)[0] * imgWidth);
            float top    = (float)(newMat.get(scoreInd, 4)[0] * imgHeight);
            float right  = (float)(newMat.get(scoreInd, 5)[0] * imgWidth);
            float bottom = (float)(newMat.get(scoreInd, 6)[0] * imgHeight);

            left   = (int)Mathf.Max(0, Mathf.Min(left, imgWidth - 1));
            top    = (int)Mathf.Max(0, Mathf.Min(top, imgHeight - 1));
            right  = (int)Mathf.Max(0, Mathf.Min(right, imgWidth - 1));
            bottom = (int)Mathf.Max(0, Mathf.Min(bottom, imgHeight - 1));

            outline = new BoxOutline
            {
                XMin = left,
                XMax = right,
                YMin = top,
                YMax = bottom
            };
        }
        else
        {
            outline = null;
        }
        prob.Dispose();
        newMat.Dispose();
    }
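    // BoxOutline is not defined in these snippets; a minimal sketch of the
    // shape implied by the usage above (pixel-space bounds as floats):
    public class BoxOutline
    {
        public float XMin;
        public float XMax;
        public float YMin;
        public float YMax;
    }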
    protected virtual Mat PreProcess(Mat img)
    {
        Mat grayImg = new Mat();

        Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_RGB2GRAY);

        // Create a 4D blob from a frame.
        Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                inpHeight > 0 ? inpHeight : img.rows());
        Mat blob = Dnn.blobFromImage(grayImg, scale, inpSize, mean, swapRB, false, CvType.CV_32F);

        grayImg.Dispose();

        return(blob);
    }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (net.empty())
                {
                    Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

                    Scalar mean = new Scalar(103.939, 116.779, 123.68);

                    Mat blob = Dnn.blobFromImage(bgrMat, 1.0, new Size(bgrMat.width(), bgrMat.height()), mean, false, false);
                    net.setInput(blob);

                    Mat prob = net.forward();

                    int[] newshape = new int[] { prob.size(2), prob.size(3) };

                    using (Mat B_channel = new Mat(prob, new Range[] { new Range(0, 1), new Range(0, 1), Range.all(), Range.all() }).reshape(1, newshape))
                        using (Mat G_channel = new Mat(prob, new Range[] { new Range(0, 1), new Range(1, 2), Range.all(), Range.all() }).reshape(1, newshape))
                            using (Mat R_channel = new Mat(prob, new Range[] { new Range(0, 1), new Range(2, 3), Range.all(), Range.all() }).reshape(1, newshape))
                            {
                                Core.merge(new List <Mat>()
                                {
                                    B_channel, G_channel, R_channel
                                }, outMat);
                            }

                    Core.add(outMat, mean, outMat);

                    outMat.convertTo(bgrMat, CvType.CV_8U);

                    Imgproc.cvtColor(bgrMat, rgbaMat, Imgproc.COLOR_BGR2RGBA);

                    prob.Dispose();
                    blob.Dispose();
                }

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
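        // An alternative to the manual Range slicing above, assuming the wrapper
        // exposes OpenCV's dnn::imagesFromBlob (available since OpenCV 3.4):
        //   List <Mat> images = new List <Mat>();
        //   Dnn.imagesFromBlob(prob, images);  // images[0] is an HxW CV_32FC3 BGR Mat
        //   Core.add(images[0], mean, outMat); // re-add the training mean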
Example 10
        public FaceEmbeddings ExtractFaceEmbeddings(Texture2D imageTex, UnityEngine.Rect ROI)
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            var embedder = Dnn.readNetFromTorch(model_filepath);

            Mat img = new Mat(imageTex.height, imageTex.width, CvType.CV_8UC3);

            Utils.texture2DToMat(imageTex, img);
            // convert the channel order only after the texture data has been copied in.
            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            if (img.empty())
            {
                Debug.LogError(input_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }

            Mat cropped_face = img.submat((int)ROI.y, (int)ROI.y + (int)ROI.height, (int)ROI.x, (int)ROI.width + (int)ROI.x);

            Imgproc.cvtColor(cropped_face, cropped_face, Imgproc.COLOR_BGR2RGB);
            var faceBlob = Dnn.blobFromImage(cropped_face, scalefactor, new Size(inpWidth, inpHeight), new Scalar(0, 0, 0), true, false);

            embedder.setInput(faceBlob);

            var netOut = embedder.forward();

            var embeddings = new FaceEmbeddings(netOut, 128);

            if (gameObject.GetComponent <Renderer>() != null && displayBB)
            {
                GenericUtils.AdjustImageScale(cropped_face, this.gameObject);
                Texture2D texture = new Texture2D(cropped_face.cols(), cropped_face.rows(), TextureFormat.RGBA32, false);
                Utils.matToTexture2D(cropped_face, texture);
                gameObject.GetComponent <Renderer>().material.mainTexture = texture;
            }

            embedder.Dispose();
            cropped_face.Dispose();
            img.Dispose();
            netOut.Dispose();

            return(embeddings);
        }
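        // FaceEmbeddings is not shown in these snippets; a minimal sketch of what
        // the constructor calls imply (copying a 1x128 CV_32F net output into a
        // managed array); the class name suffix and accessor are hypothetical:
        public class FaceEmbeddingsSketch
        {
            public float[] Values { get; private set; }

            public FaceEmbeddingsSketch(Mat netOut, int size)
            {
                Values = new float[size];
                netOut.get(0, 0, Values);
            }
        }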
Example 11
    void predict()
    {
        List <byte> mnist_data = mnist_dataset[mnist_dataset_idx];

        Mat input_img = new Mat(1, IMG_HEIGHT * IMG_WIDTH, CvType.CV_32FC1);

        for (int i = 0; i < mnist_data.Count; i++)
        {
            float p = (float)mnist_data[i] / 255.0f;
            input_img.put(0, i, p);
        }

        Mat blob = Dnn.blobFromImage(input_img);

        net.setInput(blob);
        Mat prob = net.forward();

        (int max_idx, float max_value)        = get_max_idx(prob);
        answerText.GetComponent <Text>().text = "idx : " + max_idx + " , value : " + max_value.ToString("F2");
    }
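    // get_max_idx() is called above but not shown; a minimal sketch, assuming
    // prob is a single-row CV_32F Mat of class scores (name suffix is ours):
    (int, float) get_max_idx_sketch(Mat prob)
    {
        Core.MinMaxLocResult mm = Core.minMaxLoc(prob.reshape(1, 1));
        return ((int)mm.maxLoc.x, (float)mm.maxVal);
    }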
    public FaceEmbeddings ExtractEmbeddings(Mat img)
    {
        //TickMeter tm = new TickMeter();
        //tm.start();

        if (img.empty())
        {
            Debug.LogError(model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
        }

        // convert only after the empty check; cvtColor throws on an empty input.
        Imgproc.cvtColor(img, img, Imgproc.COLOR_RGB2BGR);

        Imgproc.resize(img, img, new Size(224, 224), 0, 0, Imgproc.INTER_CUBIC);
        var faceBlob = Dnn.blobFromImage(img, 1, new Size(224, 224), new Scalar(91.4953, 103.8827, 131.0912));

        net.setInput(faceBlob);

        var netOut = net.forward();

        var embeddings = new FaceEmbeddings(netOut, 128);


        //if (gameObject.GetComponent<Renderer>() != null)
        //{
        //    GenericUtils.AdjustImageScale(img, this.gameObject);
        //    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGB24, false);
        //    Utils.matToTexture2D(img, texture);
        //    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        //}

        img.Dispose();
        netOut.Dispose();

        //tm.stop();
        //Debug.Log("inference:" + tm.getTimeMilli());

        return(embeddings);
    }
Example 13
        public Mat Extract(string path)
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            var embedder = Dnn.readNetFromTorch(model_filepath);

            Mat img = Imgcodecs.imread(Utils.getFilePath("faces/" + path));

            if (img.empty())
            {
                Debug.LogError("image is not loaded");
                return(img);
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
            var roi          = GetBB(img);
            Mat cropped_face = img.submat((int)roi.y, (int)roi.y + (int)roi.height,
                                          (int)roi.x, (int)roi.width + (int)roi.x);
            var faceBlob = Dnn.blobFromImage(cropped_face, scalefactor, new Size(inpWidth, inpHeight), new Scalar(0, 0, 0), true, false);

            embedder.setInput(faceBlob);
            var netOut = embedder.forward();

            if (gameObject.GetComponent <Renderer>() != null && displayBB)
            {
                GenericUtils.AdjustImageScale(cropped_face, this.gameObject);
                Texture2D texture = new Texture2D(cropped_face.cols(), cropped_face.rows(), TextureFormat.RGBA32, false);
                Utils.matToTexture2D(cropped_face, texture);
                gameObject.GetComponent <Renderer>().material.mainTexture = texture;
            }

            //_embedder.Dispose();
            //cropped_face.Dispose();
            img.Dispose();

            return(netOut);
        }
    protected override Mat PreProcess(Mat img)
    {
        // Input
        // The model expects input of the shape(Nx1x64x64), where N is the batch size.

        // Preprocessing
        // Given a path image_path to the image you would like to score:

        //import numpy as np
        //from PIL import Image

        //def preprocess(image_path):
        //  input_shape = (1, 1, 64, 64)
        //  img = Image.open(image_path)
        //  img = img.resize((64, 64), Image.ANTIALIAS)
        //  img_data = np.array(img)
        //  img_data = np.resize(img_data, input_shape)
        //  return img_data

        //return base.PreProcess(img);

        Mat grayImg = new Mat();

        Imgproc.cvtColor(img, grayImg, Imgproc.COLOR_RGB2GRAY);

        // Create a 4D blob from a frame.
        Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                inpHeight > 0 ? inpHeight : img.rows());
        Mat blob = Dnn.blobFromImage(grayImg, scale, inpSize, mean, swapRB, false, CvType.CV_32F);

        //Debug.Log(ToStringHighDimsMat(blob));

        grayImg.Dispose();

        return(blob);
    }
Example 15
    public bool Sort(Mat inputData, ref hogwartsHouse house, ref float value)
    {
        Mat blob = Dnn.blobFromImage(inputData);

        if (blob == null)
        {
            return(false);
        }

        net.setInput(blob);
        Mat prob = net.forward();

        if (prob == null)
        {
            return(false);
        }

        int idx = 0;

        (idx, value) = get_max_idx(prob);
        house        = ConvertIdx(idx);

        return(true);
    }
        // Use this for initialization
        void Run(Mat img)
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            if (!string.IsNullOrEmpty(classes))
            {
                classNames = readClassNames(classes_filepath);
#if !UNITY_WSA_10_0
                if (classNames == null)
                {
                    Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                }
#endif
            }
            else if (classesList.Count > 0)
            {
                classNames = classesList;
            }



            Net net = null;

            if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
            {
                Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNet(model_filepath, config_filepath);
                //! [Initialize network]
            }


            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                outBlobNames = getOutputsNames(net);
                //                for (int i = 0; i < outBlobNames.Count; i++) {
                //                    Debug.Log ("names [" + i + "] " + outBlobNames [i]);
                //                }

                outBlobTypes = getOutputsTypes(net);
                //                for (int i = 0; i < outBlobTypes.Count; i++) {
                //                    Debug.Log ("types [" + i + "] " + outBlobTypes [i]);
                //                }


                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                        inpHeight > 0 ? inpHeight : img.rows());
                Mat blob = Dnn.blobFromImage(img, scale, inpSize, mean, swapRB, false);


                // Run a model.
                net.setInput(blob);

                if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)
                {  // Faster-RCNN or R-FCN
                    Imgproc.resize(img, img, inpSize);
                    Mat imInfo = new Mat(1, 3, CvType.CV_32FC1);
                    imInfo.put(0, 0, new float[] {
                        (float)inpSize.height,
                        (float)inpSize.width,
                        1.6f
                    });
                    net.setInput(imInfo, "im_info");
                }


                TickMeter tm = new TickMeter();
                tm.start();


                List <Mat> outs = new List <Mat>();
                net.forward(outs, outBlobNames);


                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());


                postprocess(img, outs, net);

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
                net.Dispose();
            }


            Utils.setDebugMode(false);
        }
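        // getOutputsNames()/getOutputsTypes() are called above but not shown; a
        // minimal sketch of the names helper, mirroring the OpenCV
        // object_detection sample (layer ids are 1-based, hence the -1):
        private List <string> getOutputsNamesSketch(Net net)
        {
            List <string> names = new List <string>();
            MatOfInt outLayers = net.getUnconnectedOutLayers();
            List <string> layersNames = net.getLayerNames();
            for (int i = 0; i < outLayers.total(); i++)
            {
                names.Add(layersNames [(int)outLayers.get(i, 0) [0] - 1]);
            }
            outLayers.Dispose();
            return names;
        }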
Example 17
    void Run(string jpg_path)
    {
        Utils.setDebugMode(true);

        Mat img = Imgcodecs.imread(jpg_path);

        gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);

        float imageWidth  = img.width();
        float imageHeight = img.height();

        float widthScale  = (float)Screen.width / imageWidth;
        float heightScale = (float)Screen.height / imageHeight;

        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = imageHeight / 2;
        }


        Net net = null;

        if (!string.IsNullOrEmpty(graph_filepath))
        {
            net = Dnn.readNetFromTensorflow(graph_filepath);
        }

        if (net == null)
        {
            Imgproc.putText(img, "Model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        }
        else
        {
            float frameWidth  = img.cols();
            float frameHeight = img.rows();
            Mat   input       = Dnn.blobFromImage(img, 1.0, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);
            net.setInput(input, "image");
            Mat output = net.forward("Openpose/concat_stage7");
            output = output.reshape(1, 57);

            List <Point> points = new List <Point>();
            for (int i = 0; i < BODY_PARTS.Count; i++)
            {
                Mat heatMap = output.row(i).reshape(1, 46);
                Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);
                heatMap.Dispose();

                double x = (frameWidth * result.maxLoc.x) / 46;
                double y = (frameHeight * result.maxLoc.y) / 46;

                if (result.maxVal > 0.3)
                {
                    points.Add(new Point(x, y));
                }
                else
                {
                    points.Add(null);
                }
            }

            for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
            {
                string partFrom = POSE_PAIRS[i, 0];
                string partTo   = POSE_PAIRS[i, 1];

                int idFrom = BODY_PARTS[partFrom];
                int idTo   = BODY_PARTS[partTo];

                if (points[idFrom] != null && points[idTo] != null)
                {
                    Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                    Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                }
            }
        }

        Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
        Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(img, texture);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
        Utils.setDebugMode(false);
    }
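    // BODY_PARTS and POSE_PAIRS are defined elsewhere in the class; a minimal
    // sketch of their shape for an OpenPose-style model (only the first entries
    // are shown here, the full tables follow the model's part numbering):
    //   Dictionary <string, int> BODY_PARTS = new Dictionary <string, int>()
    //   {
    //       { "Head", 0 }, { "Neck", 1 }, { "RShoulder", 2 } /* ... */
    //   };
    //   string[,] POSE_PAIRS = new string[,]
    //   {
    //       { "Head", "Neck" }, { "Neck", "RShoulder" } /* ... */
    //   };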
Example 18
        // Use this for initialization
        void Run()
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);


            Mat img = Imgcodecs.imread(dnn004545_jpg_filepath);

            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("dnn/004545.jpg is not loaded. The image file can be downloaded here: \"https://github.com/chuanqi305/MobileNet-SSD/blob/master/images/004545.jpg\". Please copy to \"Assets/StreamingAssets/dnn/\" folder. ");
                img = new Mat(375, 500, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }
            #endif


            //Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(MobileNetSSD_deploy_caffemodel_filepath) || string.IsNullOrEmpty(MobileNetSSD_deploy_prototxt_filepath))
            {
                Debug.LogError("model file is not loaded. The model and prototxt file can be downloaded here: \"https://github.com/chuanqi305/MobileNet-SSD\". Please copy to \"Assets/StreamingAssets/dnn/\" folder. ");
            }
            else
            {
                net = Dnn.readNetFromCaffe(MobileNetSSD_deploy_prototxt_filepath, MobileNetSSD_deploy_caffemodel_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                Mat blob = Dnn.blobFromImage(img, inScaleFactor, new Size(inWidth, inHeight), new Scalar(meanVal, meanVal, meanVal), false, false);

                net.setInput(blob);


                TickMeter tm = new TickMeter();
                tm.start();

                Mat prob = net.forward();
                prob = prob.reshape(1, (int)prob.total() / 7);

                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());



                float[] data = new float[7];

                float confidenceThreshold = 0.2f;
                for (int i = 0; i < prob.rows(); i++)
                {
                    prob.get(i, 0, data);

                    float confidence = data [2];

                    if (confidence > confidenceThreshold)
                    {
                        int class_id = (int)(data [1]);

                        float left   = data [3] * img.cols();
                        float top    = data [4] * img.rows();
                        float right  = data [5] * img.cols();
                        float bottom = data [6] * img.rows();

                        Debug.Log("class_id: " + class_id);
                        Debug.Log("Confidence: " + confidence);

                        Debug.Log(" " + left
                                  + " " + top
                                  + " " + right
                                  + " " + bottom);

                        Imgproc.rectangle(img, new Point(left, top), new Point(right, bottom),
                                          new Scalar(0, 255, 0), 2);
                        string label     = classNames [class_id] + ": " + confidence;
                        int[]  baseLine  = new int[1];
                        Size   labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                        top = Mathf.Max(top, (float)labelSize.height);

                        Imgproc.rectangle(img, new Point(left, top),
                                          new Point(left + labelSize.width, top + labelSize.height + baseLine [0]),
                                          new Scalar(255, 255, 255), Core.FILLED);
                        Imgproc.putText(img, label, new Point(left, top + labelSize.height),
                                        Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0));
                    }
                }

                prob.Dispose();
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
Example 19
        // Use this for initialization
        void Run()
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath);

            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("dnn/COCO_val2014_000000000589.jpg is not loaded. The image file can be downloaded here: \"https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/examples/media/COCO_val2014_000000000589.jpg\". Please copy to \"Assets/StreamingAssets/dnn/\" folder. ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }
            #endif


            //Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError("model file is not loaded. The model and prototxt file can be downloaded here: \"http://posefs1.perception.cs.cmu.edu/OpenPose/models/pose/mpi/pose_iter_160000.caffemodel\", \"https://github.com/opencv/opencv_extra/blob/master/testdata/dnn/openpose_pose_mpi_faster_4_stages.prototxt\". Please copy to \"Assets/StreamingAssets/dnn/\" folder. ");
            }
            else
            {
                net = Dnn.readNetFromCaffe(prototxt_filepath, caffemodel_filepath);

                //Intel's Deep Learning Inference Engine backend is supported on Windows 64bit platform only. Please refer to ReadMe.pdf for the setup procedure.
                //net.setPreferableBackend (Dnn.DNN_BACKEND_INFERENCE_ENGINE);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameWidth  = img.cols();
                float frameHeight = img.rows();

                Mat input = Dnn.blobFromImage(img, 1.0 / 255, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);

                net.setInput(input);

//                TickMeter tm = new TickMeter ();
//                tm.start ();

                Mat output = net.forward();

//                tm.stop ();
//                Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());


                output = output.reshape(1, 16);


                float[]      data   = new float[46 * 46];
                List <Point> points = new List <Point> ();
                for (int i = 0; i < BODY_PARTS.Count; i++)
                {
                    output.get(i, 0, data);

                    Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
                    heatMap.put(0, 0, data);


                    //Originally, we try to find all the local maximums. To simplify a sample
                    //we just find a global one. However only a single pose at the same time
                    //could be detected this way.
                    Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);

                    heatMap.Dispose();


                    // the heat map was flattened to a single row, so recover the 2D
                    // coordinates from the linear index (col = idx % 46, row = idx / 46).
                    double x = (frameWidth * (result.maxLoc.x % 46)) / 46;
                    double y = (frameHeight * (int)(result.maxLoc.x / 46)) / 46;

                    if (result.maxVal > 0.1)
                    {
                        points.Add(new Point(x, y));
                    }
                    else
                    {
                        points.Add(null);
                    }
                }

                for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
                {
                    string partFrom = POSE_PAIRS [i, 0];
                    string partTo   = POSE_PAIRS [i, 1];

                    int idFrom = BODY_PARTS [partFrom];
                    int idTo   = BODY_PARTS [partTo];

                    if (points [idFrom] != null && points [idTo] != null)
                    {
                        Imgproc.line(img, points [idFrom], points [idTo], new Scalar(0, 255, 0), 3);
                        Imgproc.ellipse(img, points [idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                        Imgproc.ellipse(img, points [idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    }
                }



                MatOfDouble timings = new MatOfDouble();
                long        t       = net.getPerfProfile(timings);
                Debug.Log("t: " + t);
                Debug.Log("timings.dump(): " + timings.dump());

                double freq = Core.getTickFrequency() / 1000;
                Debug.Log("freq: " + freq);

                Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);


            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
Example 20
        // Use this for initialization
        void Run()
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }


            //Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError(caffemodel_filepath + " or " + prototxt_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                net = Dnn.readNet(prototxt_filepath, caffemodel_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameWidth  = img.cols();
                float frameHeight = img.rows();

                Mat input = Dnn.blobFromImage(img, inScale, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);

                net.setInput(input);

                //TickMeter tm = new TickMeter ();
                //tm.start ();

                Mat output = net.forward();

                //tm.stop ();
                //Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

                //Debug.Log("output.size(0) " + output.size(0));
                //Debug.Log("output.size(1) " + output.size(1));
                //Debug.Log("output.size(2) " + output.size(2));
                //Debug.Log("output.size(3) " + output.size(3));

                float[] data = new float[output.size(2) * output.size(3)];

                output = output.reshape(1, output.size(1));

                List <Point> points = new List <Point>();
                for (int i = 0; i < BODY_PARTS.Count; i++)
                {
                    output.get(i, 0, data);

                    Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
                    heatMap.put(0, 0, data);


                    //Originally, we try to find all the local maximums. To simplify a sample
                    //we just find a global one. However only a single pose at the same time
                    //could be detected this way.
                    Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);

                    heatMap.Dispose();


                    // the heat map was flattened to a single row, so recover the 2D
                    // coordinates from the linear index (col = idx % 46, row = idx / 46).
                    double x = (frameWidth * (result.maxLoc.x % 46)) / 46;
                    double y = (frameHeight * (int)(result.maxLoc.x / 46)) / 46;

                    if (result.maxVal > 0.1)
                    {
                        points.Add(new Point(x, y));
                    }
                    else
                    {
                        points.Add(null);
                    }
                }

                for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
                {
                    string partFrom = POSE_PAIRS[i, 0];
                    string partTo   = POSE_PAIRS[i, 1];

                    int idFrom = BODY_PARTS[partFrom];
                    int idTo   = BODY_PARTS[partTo];

                    if (points[idFrom] != null && points[idTo] != null)
                    {
                        Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                        Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                        Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    }
                }

                MatOfDouble timings = new MatOfDouble();
                long        t       = net.getPerfProfile(timings);
                Debug.Log("t: " + t);
                Debug.Log("timings.dump(): " + timings.dump());

                double freq = Core.getTickFrequency() / 1000;
                Debug.Log("freq: " + freq);

                Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);


            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
        // Use this for initialization
        void Run()
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath, Imgcodecs.IMREAD_COLOR);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }

            //Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net detector   = null;
            Net recognizer = null;

            if (string.IsNullOrEmpty(detectionmodel_filepath) || string.IsNullOrEmpty(recognitionmodel_filepath))
            {
                Debug.LogError(detectionmodel_filepath + " or " + recognitionmodel_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                detector   = Dnn.readNet(detectionmodel_filepath);
                recognizer = Dnn.readNet(recognitionmodel_filepath);
            }

            if (detector == null || recognizer == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                TickMeter tickMeter = new TickMeter();

                List <Mat>    outs     = new List <Mat>();
                List <string> outNames = new List <string>();
                outNames.Add("feature_fusion/Conv_7/Sigmoid");
                outNames.Add("feature_fusion/concat_3");

                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(), inpHeight > 0 ? inpHeight : img.rows());
                Mat  blob    = Dnn.blobFromImage(img, 1.0, inpSize, new Scalar(123.68, 116.78, 103.94), true, false); // blobFromImage(frame, blob, 1.0, Size(inpWidth, inpHeight), Scalar(123.68, 116.78, 103.94), true, false);

                // Run detection model.
                detector.setInput(blob);
                tickMeter.start();
                detector.forward(outs, outNames);
                tickMeter.stop();

                Mat scores   = outs[0];
                Mat geometry = outs[1];

                // Decode predicted bounding boxes.
                List <RotatedRect> boxes       = new List <RotatedRect>();
                List <float>       confidences = new List <float>();
                decodeBoundingBoxes(scores, geometry, confThreshold, boxes, confidences);


                // Apply non-maximum suppression procedure.
                MatOfRotatedRect boxesMat       = new MatOfRotatedRect(boxes.ToArray());
                MatOfFloat       confidencesMat = new MatOfFloat(confidences.ToArray());
                MatOfInt         indicesMat     = new MatOfInt();
                Dnn.NMSBoxesRotated(boxesMat, confidencesMat, confThreshold, nmsThreshold, indicesMat);

                List <int> indices = indicesMat.toList();
                Point      ratio   = new Point((double)img.cols() / inpWidth, (double)img.rows() / inpHeight);

                // Render text.
                for (int i = 0; i < indices.Count; ++i)
                {
                    RotatedRect box = boxes[indices[i]];

                    Point[] vertices = new Point[4];
                    box.points(vertices);

                    for (int j = 0; j < 4; ++j)
                    {
                        vertices[j].x *= ratio.x;
                        vertices[j].y *= ratio.y;
                    }

                    for (int j = 0; j < 4; ++j)
                    {
                        Imgproc.line(img, vertices[j], vertices[(j + 1) % 4], new Scalar(0, 255, 0), 1);
                    }

                    if (recognizer != null)
                    {
                        Mat cropped = new Mat();
                        fourPointsTransform(img, vertices, cropped);

                        //Debug.Log(cropped);

                        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_BGR2GRAY);

                        Mat blobCrop = Dnn.blobFromImage(cropped, 1.0 / 127.5, new Size(), Scalar.all(127.5));
                        recognizer.setInput(blobCrop);

                        //Debug.Log(blobCrop);

                        tickMeter.start();
                        Mat result = recognizer.forward();
                        tickMeter.stop();

                        string wordRecognized;
                        decodeText(result, out wordRecognized);
                        Imgproc.putText(img, wordRecognized, vertices[1], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 0, 0), 1, Imgproc.LINE_AA, false);

                        Debug.Log(wordRecognized);


                        cropped.Dispose();
                        blobCrop.Dispose();
                        result.Dispose();
                    }
                }

                Debug.Log("Inference time, ms: " + tickMeter.getTimeMilli());

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
                detector.Dispose();
                recognizer.Dispose();
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
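        // decodeBoundingBoxes(), fourPointsTransform() and decodeText() are
        // helpers from the OpenCV EAST/CRNN text sample and are not shown here.
        // A minimal sketch of greedy CTC decoding for the recognizer output,
        // assuming a TxC score Mat with the blank symbol at index 0 and a
        // digits-plus-lowercase alphabet:
        private void decodeTextSketch(Mat scores, out string text)
        {
            string alphabet = "0123456789abcdefghijklmnopqrstuvwxyz";
            Mat scoresMat = scores.reshape(1, scores.size(0));
            text = "";
            char lastChar = '-';
            for (int i = 0; i < scoresMat.rows(); i++)
            {
                using (Mat row = scoresMat.row(i))
                {
                    Core.MinMaxLocResult mm = Core.minMaxLoc(row);
                    int idx = (int)mm.maxLoc.x;
                    // index 0 is the CTC blank; also collapse repeated characters
                    char c = (idx > 0) ? alphabet [idx - 1] : '-';
                    if (c != '-' && c != lastChar)
                    {
                        text += c;
                    }
                    lastChar = c;
                }
            }
            scoresMat.Dispose();
        }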
Example 22
        // Use this for initialization
        void Run()
        {
            //if true, the error log of the native-side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);


            classNames = readClassNames(classes_filepath);
            if (classNames == null)
            {
                Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }

            classColors = new List <Scalar>();
            for (int i = 0; i < classNames.Count; i++)
            {
                classColors.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
            }


            Mat img = Imgcodecs.imread(image_filepath);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(height, width, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }



            //Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(model_filepath) || string.IsNullOrEmpty(config_filepath))
            {
                Debug.LogError(model_filepath + " or " + config_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                net = Dnn.readNetFromTensorflow(model_filepath, config_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameW = img.cols();
                float frameH = img.rows();

                // Create a 4D blob from a frame.
                Mat blob = Dnn.blobFromImage(img, 1.0, new Size(width, height), new Scalar(0, 0, 0), true, false);

                //Run a model
                net.setInput(blob);

                List <Mat>    outputBlobs = new List <Mat>();
                List <string> outputName  = new List <string>();
                outputName.Add("detection_out_final");
                outputName.Add("detection_masks");

                TickMeter tm = new TickMeter();
                tm.start();

                net.forward(outputBlobs, outputName);

                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());

                Mat boxes = outputBlobs[0];
                Mat masks = outputBlobs[1];
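                // Output layout: "detection_out_final" is a 1x1xNx7 blob, one row per detection
                // [batchId, classId, score, left, top, right, bottom] with normalized box coordinates;
                // "detection_masks" is an N x numClasses x maskH x maskW blob of per-class soft masks.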

                //int numClasses = masks.size(1);
                int numDetections = boxes.size(2);
                int mask_sizeH    = masks.size(2);
                int mask_sizeW    = masks.size(3);

                float[] box_data  = new float[boxes.size(3)];
                float[] mask_data = new float[masks.size(2) * masks.size(3)];

                for (int i = 0; i < numDetections; i++)
                {
                    boxes.get(new int[] { 0, 0, i, 0 }, box_data);

                    float score = box_data[2];

                    if (score > thr)
                    {
                        int classId = (int)box_data[1];

                        float left   = frameW * box_data[3];
                        float top    = frameH * box_data[4];
                        float right  = frameW * box_data[5];
                        float bottom = frameH * box_data[6];

                        left   = (int)Mathf.Max(0, Mathf.Min(left, frameW - 1));
                        top    = (int)Mathf.Max(0, Mathf.Min(top, frameH - 1));
                        right  = (int)Mathf.Max(0, Mathf.Min(right, frameW - 1));
                        bottom = (int)Mathf.Max(0, Mathf.Min(bottom, frameH - 1));


                        masks.get(new int[] { i, classId, 0, 0 }, mask_data);
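                        // mask_data now holds this detection's soft mask for its predicted class;
                        // resize it to the box size and threshold at 0.5 to obtain a binary mask.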

                        Mat classMask = new Mat(mask_sizeH, mask_sizeW, CvType.CV_32F);
                        classMask.put(0, 0, mask_data);
                        Imgproc.resize(classMask, classMask, new Size(right - left + 1, bottom - top + 1));
                        Core.compare(classMask, new Scalar(0.5), classMask, Core.CMP_GT);

                        Mat roi        = new Mat(img, new OpenCVForUnity.CoreModule.Rect(new Point(left, top), new Point(right + 1, bottom + 1)));
                        Mat coloredRoi = new Mat(roi.size(), CvType.CV_8UC3);
                        Imgproc.rectangle(coloredRoi, new Point(0, 0), new Point(coloredRoi.width(), coloredRoi.height()), classColors[classId], -1);
                        Core.addWeighted(coloredRoi, 0.7, roi, 0.3, 0, coloredRoi);

                        coloredRoi.copyTo(roi, classMask);
                        coloredRoi.Dispose();
                        classMask.Dispose();


                        drawPred(classId, score, left, top, right, bottom, img);

                        Debug.Log("classId:" + classId + " cnof:" + score + " l:" + left + " t:" + top + " r:" + right + " b:" + bottom);
                    }
                }

                boxes.Dispose();
                masks.Dispose();
                blob.Dispose();
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            net.Dispose();


            Utils.setDebugMode(false);
        }
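
The drawPred helper called above is not shown in this listing. The following is a minimal sketch of what it plausibly does (draw the box, then a labeled score), modeled on the inline drawing code in the Mask R-CNN example further below; the exact original implementation may differ.

        void drawPred(int classId, float conf, float left, float top, float right, float bottom, Mat frame)
        {
            // Draw the detection bounding box.
            Imgproc.rectangle(frame, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0), 2);

            // Compose a "<class>: <score>" label, falling back to the raw score.
            string label = conf.ToString();
            if (classNames != null && classId < classNames.Count)
            {
                label = classNames[classId] + ": " + label;
            }

            // Draw a filled background behind the label so it stays readable.
            int[] baseLine  = new int[1];
            Size  labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
            top = Mathf.Max(top, (float)labelSize.height);
            Imgproc.rectangle(frame, new Point(left, top - labelSize.height),
                              new Point(left + labelSize.width, top + baseLine[0]), Scalar.all(255), Core.FILLED);
            Imgproc.putText(frame, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
        }

Example No. 23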
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);


            List <string> classNames = readClassNames(coco_names_filepath);

            #if !UNITY_WSA_10_0
            if (classNames == null)
            {
                Debug.LogError("class names list file is not loaded.The model and class names list can be downloaded here: \"https://github.com/pjreddie/darknet/tree/master/data/coco.names\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
            }
            #endif


            Mat img = Imgcodecs.imread(person_jpg_filepath);
            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("dnn/person.jpg is not loaded.The image file can be downloaded here: \"https://github.com/pjreddie/darknet/blob/master/data/person.jpg\".Please copy to \"Assets/StreamingAssets/dnn/\" folder. ");
                img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }
            #endif


            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;
            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(tiny_yolo_cfg_filepath) || string.IsNullOrEmpty(tiny_yolo_weights_filepath))
            {
                Debug.LogError("model file is not loaded. the cfg-file and weights-file can be downloaded here: https://github.com/pjreddie/darknet/blob/master/cfg/tiny-yolo.cfg and https://pjreddie.com/media/files/tiny-yolo.weights. Please copy to “Assets/StreamingAssets/dnn/” folder. ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNetFromDarknet(tiny_yolo_cfg_filepath, tiny_yolo_weights_filepath);
                //! [Initialize network]
            }


            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                //! [Resizing without keeping aspect ratio]
                Mat resized = new Mat();
                Imgproc.resize(img, resized, new Size(network_width, network_height));
                //! [Resizing without keeping aspect ratio]

                //! [Prepare blob]
                Mat inputBlob = Dnn.blobFromImage(resized, 1 / 255.0, new Size(), new Scalar(0), true, true);    //Convert Mat to batch of images
                //! [Prepare blob]

                //! [Set input blob]
                net.setInput(inputBlob, "data");                    //set the network input
                //! [Set input blob]


                TickMeter tm = new TickMeter();
                tm.start();

                //! [Make forward pass]
                Mat detectionMat = net.forward("detection_out");    //compute output
                //! [Make forward pass]

                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());

                Debug.Log("detectionMat.ToString(): " + detectionMat.ToString());

                float[] position    = new float[5];
                float[] confidences = new float[80];

                float confidenceThreshold = 0.24f;
                for (int i = 0; i < detectionMat.rows(); i++)
                {
                    detectionMat.get(i, 0, position);

                    detectionMat.get(i, 5, confidences);

                    int   maxIdx     = confidences.Select((val, idx) => new { V = val, I = idx }).Aggregate((max, working) => (max.V > working.V) ? max : working).I;
                    float confidence = confidences [maxIdx];

                    if (confidence > confidenceThreshold)
                    {
                        float x           = position [0];
                        float y           = position [1];
                        float width       = position [2];
                        float height      = position [3];
                        int   xLeftBottom = (int)((x - width / 2) * img.cols());
                        int   yLeftBottom = (int)((y - height / 2) * img.rows());
                        int   xRightTop   = (int)((x + width / 2) * img.cols());
                        int   yRightTop   = (int)((y + height / 2) * img.rows());

                        Debug.Log("confidence: " + confidence);

                        Debug.Log(" " + xLeftBottom
                                  + " " + yLeftBottom
                                  + " " + xRightTop
                                  + " " + yRightTop);

                        Imgproc.rectangle(img, new Point(xLeftBottom, yLeftBottom), new Point(xRightTop, yRightTop),
                                          new Scalar(0, 255, 0), 2);

                        if (classNames != null && maxIdx < classNames.Count)
                        {
                            string label     = classNames [maxIdx] + ": " + confidence;
                            int[]  baseLine  = new int[1];
                            Size   labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                            Imgproc.rectangle(img, new Point(xLeftBottom, yLeftBottom),
                                              new Point(xLeftBottom + labelSize.width, yLeftBottom + labelSize.height + baseLine [0]),
                                              new Scalar(255, 255, 255), Core.FILLED);
                            Imgproc.putText(img, label, new Point(xLeftBottom, yLeftBottom + labelSize.height),
                                            Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0));
                        }
                    }
                }
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
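
The readClassNames helper used throughout these examples is likewise not shown. Here is a minimal sketch under the assumption that the class list file contains one class name per line; the original's error handling may differ.

        List <string> readClassNames(string filename)
        {
            List <string> names = new List <string>();
            try
            {
                using (System.IO.StreamReader reader = new System.IO.StreamReader(filename))
                {
                    string line;
                    while ((line = reader.ReadLine()) != null)
                    {
                        // One class name per non-empty line.
                        if (line.Length > 0)
                            names.Add(line.Trim());
                    }
                }
            }
            catch (System.Exception ex)
            {
                Debug.LogError("readClassNames: " + ex.Message);
                return null;
            }
            return names;
        }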
Example No. 24
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);


            classNames = readClassNames(classes_filepath);
            if (classNames == null)
            {
                Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }

            classColors = new List <Scalar> ();
            if (classNames != null)
            {
                // Give each class a random color for mask visualization.
                for (int i = 0; i < classNames.Count; i++)
                {
                    classColors.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
                }
            }


            Mat img = Imgcodecs.imread(image_filepath);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(height, width, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }



            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(model_filepath) || string.IsNullOrEmpty(config_filepath))
            {
                Debug.LogError(model_filepath + " or " + config_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                net = Dnn.readNetFromTensorflow(model_filepath, config_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameWidth  = img.cols();
                float frameHeight = img.rows();

                Mat blob = Dnn.blobFromImage(img, 1.0, new Size(width, height), new Scalar(0, 0, 0), true, false);


                net.setInput(blob);



                List <Mat>    outputBlobs = new List <Mat> ();
                List <string> outputName  = new List <string> ();
                outputName.Add("detection_out_final");
                outputName.Add("detection_masks");

                net.forward(outputBlobs, outputName);

                Mat boxes = outputBlobs [0];
                Mat masks = outputBlobs [1];


                //int numClasses = masks.size (1);
                //int numDetections = boxes.size (2);


                Debug.Log("boxes.size(0) " + boxes.size(0));
                Debug.Log("boxes.size(1) " + boxes.size(1));
                Debug.Log("boxes.size(2) " + boxes.size(2));
                Debug.Log("boxes.size(3) " + boxes.size(3));
                Debug.Log("masks.size(0) " + masks.size(0));
                Debug.Log("masks.size(1) " + masks.size(1));
                Debug.Log("masks.size(2) " + masks.size(2));
                Debug.Log("masks.size(3) " + masks.size(3));


                // Reshape the 4D boxes blob into a 2D matrix (one row per detection).
                float[] data = new float[boxes.size(3)];
                boxes = boxes.reshape(1, (int)boxes.total() / boxes.size(3));
//              Debug.Log ("boxes.ToString() " + boxes.ToString ());

                // Reshape the 4D masks blob into a 2D matrix (one row per 15x15 mask).
                float[] mask_data = new float[masks.size(2) * masks.size(3)];
                masks = masks.reshape(1, (int)masks.total() / (masks.size(2) * masks.size(3)));
//              Debug.Log ("masks.ToString(): " + masks.ToString ());


                for (int i = 0; i < boxes.rows(); i++)
                {
                    boxes.get(i, 0, data);

                    float score = data [2];

                    if (score > thr)
                    {
                        int class_id = (int)(data [1]);


                        float left   = (float)(data [3] * frameWidth);
                        float top    = (float)(data [4] * frameHeight);
                        float right  = (float)(data [5] * frameWidth);
                        float bottom = (float)(data [6] * frameHeight);

                        left   = (int)Mathf.Max(0, Mathf.Min(left, frameWidth - 1));
                        top    = (int)Mathf.Max(0, Mathf.Min(top, frameHeight - 1));
                        right  = (int)Mathf.Max(0, Mathf.Min(right, frameWidth - 1));
                        bottom = (int)Mathf.Max(0, Mathf.Min(bottom, frameHeight - 1));

                        Debug.Log("class_id: " + class_id + " class_name " + classNames [class_id] + " left: " + left + " top: " + top + " right: " + right + " bottom: " + bottom);



                        //draw masks
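                        // "detection_masks" was reshaped above to (N * 90) rows of 15x15 masks:
                        // this model predicts 90 COCO classes, so row (i * 90) + class_id holds
                        // the 15x15 soft mask for detection i and its predicted class.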

                        masks.get((i * 90) + class_id, 0, mask_data);

                        Mat objectMask = new Mat(15, 15, CvType.CV_32F);
                        Utils.copyToMat <float> (mask_data, objectMask);

                        Imgproc.resize(objectMask, objectMask, new Size(right - left + 1, bottom - top + 1));

                        Core.compare(objectMask, new Scalar(mask_thr), objectMask, Core.CMP_GT);
//                        Debug.Log ("objectMask.ToString(): " + objectMask.ToString ());
//                        Debug.Log ("objectMask.dump(): " + objectMask.dump ());


                        Mat roi = new Mat(img, new OpenCVForUnity.CoreModule.Rect(new Point(left, top), new Point(right + 1, bottom + 1)));

                        Mat coloredRoi = new Mat(roi.size(), CvType.CV_8UC3);

                        Imgproc.rectangle(coloredRoi, new Point(0, 0), new Point(coloredRoi.width(), coloredRoi.height()), classColors [class_id], -1);

                        Core.addWeighted(coloredRoi, 0.7, roi, 0.3, 0, coloredRoi);
//                        Debug.Log ("coloredRoi.ToString(): " + coloredRoi.ToString ());
//                        Debug.Log ("roi.ToString(): " + roi.ToString ());

                        coloredRoi.copyTo(roi, objectMask);
                        coloredRoi.Dispose();

                        objectMask.Dispose();



                        //draw boxes

                        Imgproc.rectangle(img, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0), 2);

                        string label = score.ToString();
                        if (classNames != null && classNames.Count != 0)
                        {
                            if (class_id < (int)classNames.Count)
                            {
                                label = classNames [class_id] + ": " + label;
                            }
                        }

                        int[] baseLine  = new int[1];
                        Size  labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                        top = Mathf.Max(top, (int)labelSize.height);
                        Imgproc.rectangle(img, new Point(left, top - labelSize.height),
                                          new Point(left + labelSize.width, top + baseLine [0]), Scalar.all(255), Core.FILLED);
                        Imgproc.putText(img, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
                    }
                }

                boxes.Dispose();
                masks.Dispose();
                blob.Dispose();
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);


            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;



            net.Dispose();


            Utils.setDebugMode(false);
        }
Example No. 25
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath, Imgcodecs.IMREAD_COLOR);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }

            // Allocate the output image only after the fallback above, so its size always matches img.
            Mat colorized = new Mat(img.rows(), img.cols(), img.type());

            Net net = null;

            if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError(caffemodel_filepath + " or " + prototxt_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                net = Dnn.readNetFromCaffe(prototxt_filepath, caffemodel_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                // setup additional layers:
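                // pts_in_hull holds the 313 quantized ab color cluster centers used by the
                // colorization model; they are injected as the kernel of the 1x1 "class8_ab"
                // convolution, and "conv8_313_rh" receives a constant scaling blob (2.606),
                // as in the original OpenCV colorization sample.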
                int[] sz          = new int[] { 2, 313, 1, 1 };
                Mat   pts_in_hull = new Mat(sz, CvType.CV_32F);
                pts_in_hull.put(new int[] { 0, 0, 0, 0 }, hull_pts);

                Layer      class8_ab = net.getLayer(new DictValue("class8_ab"));
                List <Mat> blobs     = class8_ab.get_blobs();
                blobs.Add(pts_in_hull);
                class8_ab.set_blobs(blobs);

                Layer conv8_313_rh = net.getLayer(new DictValue("conv8_313_rh"));
                blobs = conv8_313_rh.get_blobs();
                blobs.Add(new Mat(1, 313, CvType.CV_32F, new Scalar(2.606)));
                conv8_313_rh.set_blobs(blobs);

                // extract L channel and subtract mean
                Mat img_32F = new Mat();
                Mat lab     = new Mat();
                Mat L       = new Mat();
                Mat input   = new Mat();
                img.convertTo(img_32F, CvType.CV_32F, 1.0 / 255);
                Imgproc.cvtColor(img_32F, lab, Imgproc.COLOR_BGR2Lab);
                Core.extractChannel(lab, L, 0);
                Imgproc.resize(L, input, new Size(inWidth, inHeight));
                Core.subtract(input, new Scalar(50.0), input);

                // run the L channel through the network
                Mat inputBlob = Dnn.blobFromImage(input);
                net.setInput(inputBlob);
                Mat result = net.forward();

                // retrieve the calculated a,b channels from the network output
                Mat result_a = new Mat(result, new Range[] { new Range(0, 1), new Range(0, 1), new Range(0, result.size(2)), new Range(0, result.size(3)) });
                Mat result_b = new Mat(result, new Range[] { new Range(0, 1), new Range(1, 2), new Range(0, result.size(2)), new Range(0, result.size(3)) });
                result_a = result_a.reshape(1, result.size(2));
                result_b = result_b.reshape(1, result.size(2));
                Mat a = new Mat(img.size(), CvType.CV_32F);
                Mat b = new Mat(img.size(), CvType.CV_32F);
                Imgproc.resize(result_a, a, img.size());
                Imgproc.resize(result_b, b, img.size());

                // merge, and convert back to BGR
                List <Mat> chn = new List <Mat>();
                chn.Add(L); chn.Add(a); chn.Add(b);
                Core.merge(chn, lab);
                Imgproc.cvtColor(lab, img_32F, Imgproc.COLOR_Lab2BGR);
                img_32F.convertTo(colorized, CvType.CV_8U, 255.0);



                MatOfDouble timings = new MatOfDouble();
                long        t       = net.getPerfProfile(timings);
                double      freq    = Core.getTickFrequency() / 1000;
                Debug.Log("Inference time " + (t / freq) + "ms");
                Imgproc.putText(colorized, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2);

                Imgproc.putText(img, "gray", new Point(10, 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2);
                Imgproc.putText(colorized, "colorized", new Point(10, 20), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2);
            }

            Imgproc.cvtColor(colorized, colorized, Imgproc.COLOR_BGR2RGB);

            Mat display = new Mat(img.rows() * 2, img.cols(), img.type());

            using (Mat gray = new Mat(img.rows(), img.cols(), CvType.CV_8UC1))
                using (Mat displayUpperHalf = new Mat(display, new Range(0, img.rows())))
                    using (Mat displayLowerHalf = new Mat(display, new Range(img.rows(), display.rows())))
                    {
                        Imgproc.cvtColor(img, gray, Imgproc.COLOR_BGR2GRAY);
                        Imgproc.cvtColor(gray, img, Imgproc.COLOR_GRAY2RGB);

                        img.copyTo(displayUpperHalf);
                        colorized.copyTo(displayLowerHalf);
                    }

            Texture2D texture = new Texture2D(display.cols(), display.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(display, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(display.width(), display.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = display.width();
            float imageHeight = display.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Utils.setDebugMode(false);
        }
Example No. 26
    void Run()
    {
        Utils.setDebugMode(true);

        Mat img = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(img, img, Imgproc.COLOR_RGBA2BGR);
        gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);

        if (net != null)
        {
            float frameWidth  = img.cols();
            float frameHeight = img.rows();
            Mat   input       = Dnn.blobFromImage(img, 1.0, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);
            net.setInput(input, "image");
            Mat output = net.forward("Openpose/concat_stage7");
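            // The COCO OpenPose output ("Openpose/concat_stage7") has 57 channels
            // (18 keypoint heatmaps + 1 background + 38 part-affinity-field channels)
            // at 46x46 resolution; reshape to 57 rows so each row is one 46x46 map.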

            output = output.reshape(1, 57);
            List <Point> points = new List <Point>();
            for (int i = 0; i < BODY_PARTS.Count; i++)
            {
                Mat heatMap = output.row(i).reshape(1, 46);
                Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);
                heatMap.Dispose();

                double x = (frameWidth * result.maxLoc.x) / 46;
                double y = (frameHeight * result.maxLoc.y) / 46;

                if (result.maxVal > 0.3)
                {
                    points.Add(new Point(x, y));
                }
                else
                {
                    points.Add(null);
                }
            }

            for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
            {
                string partFrom = POSE_PAIRS[i, 0];
                string partTo   = POSE_PAIRS[i, 1];

                int idFrom = BODY_PARTS[partFrom];
                int idTo   = BODY_PARTS[partTo];

                if (points[idFrom] != null && points[idTo] != null)
                {
                    Debug.Log("x=" + points[idFrom].x + " y=" + points[idFrom].y);
                    Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                    Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                }
            }

            // Draw the average frame rate once per frame, rather than once per pose pair.
            float avgFrameRate = Time.frameCount / Time.time;
            Imgproc.putText(img, "FR=" + avgFrameRate, new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            // Release the native Mats created for this frame.
            input.Dispose();
            output.Dispose();
        }

        Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
        Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

        Utils.matToTexture2D(img, texture);
        gameObject.GetComponent <Renderer>().material.mainTexture = texture;
    }
Example No. 27
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                if (net == null)
                {
                    Imgproc.putText(rgbaMat, "model file is not loaded.", new Point(5, rgbaMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    Imgproc.putText(rgbaMat, "Please read console message.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                }
                else
                {
                    Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

                    //! [Resizing without keeping aspect ratio]
                    Imgproc.resize(bgrMat, resized, new Size(network_width, network_height));
                    //! [Resizing without keeping aspect ratio]


                    //! [Prepare blob]
                    inputBlob = Dnn.blobFromImage(resized, 1 / 255.0, new Size(), new Scalar(0), true, true);    //Convert Mat to batch of images
                    //! [Prepare blob]

                    //! [Set input blob]
                    net.setInput(inputBlob, "data");                    //set the network input
                    //! [Set input blob]


//                    TickMeter tm = new TickMeter ();
//                    tm.start ();

                    //! [Make forward pass]
                    Mat detectionMat = net.forward("detection_out");    //compute output
                    //! [Make forward pass]

//                    tm.stop ();
//                    Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

//                    Debug.Log ("detectionMat.ToString(): " + detectionMat.ToString ());

                    float[] position    = new float[5];
                    float[] confidences = new float[80];

                    float confidenceThreshold = 0.24f;
                    for (int i = 0; i < detectionMat.rows(); i++)
                    {
                        detectionMat.get(i, 0, position);

                        detectionMat.get(i, 5, confidences);


                        int   maxIdx     = confidences.Select((val, idx) => new { V = val, I = idx }).Aggregate((max, working) => (max.V > working.V) ? max : working).I;
                        float confidence = confidences [maxIdx];


                        if (confidence > confidenceThreshold)
                        {
                            float x           = position [0];
                            float y           = position [1];
                            float width       = position [2];
                            float height      = position [3];
                            int   xLeftBottom = (int)((x - width / 2) * rgbaMat.cols());
                            int   yLeftBottom = (int)((y - height / 2) * rgbaMat.rows());
                            int   xRightTop   = (int)((x + width / 2) * rgbaMat.cols());
                            int   yRightTop   = (int)((y + height / 2) * rgbaMat.rows());

//                            Debug.Log ("confidence: " + confidence);
//
//                            Debug.Log (" " + xLeftBottom
//                            + " " + yLeftBottom
//                            + " " + xRightTop
//                            + " " + yRightTop);


                            Imgproc.rectangle(rgbaMat, new Point(xLeftBottom, yLeftBottom), new Point(xRightTop, yRightTop),
                                              new Scalar(0, 255, 0, 255), 2);

                            if (maxIdx < classNames.Count)
                            {
                                string label     = classNames [maxIdx] + ": " + confidence;
                                int[]  baseLine  = new int[1];
                                Size   labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                                Imgproc.rectangle(rgbaMat, new Point(xLeftBottom, yLeftBottom),
                                                  new Point(xLeftBottom + labelSize.width, yLeftBottom + labelSize.height + baseLine [0]),
                                                  new Scalar(255, 255, 255, 255), Core.FILLED);
                                Imgproc.putText(rgbaMat, label, new Point(xLeftBottom, yLeftBottom + labelSize.height),
                                                Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
                            }
                        }
                    }

                    detectionMat.Dispose();
                }

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example No. 28
    /// <summary>
    /// Predict: run the pose network on the given image and update jointPoints.
    /// </summary>
    /// <param name="img">Input image Mat fed to the network.</param>
    public void Predict(Mat img)
    {
        var blob = Dnn.blobFromImage(img, 1.0 / 255.0, new Size(inputImageSize, inputImageSize), new Scalar(0.0, 0.0, 0.0), false, false);

        Onnx.setInput(blob);
        Onnx.forward(outputs, outputNames);
        blob.Dispose();

        // copy 2D outputs
        Marshal.Copy((IntPtr)outputs[2].dataAddr(), heatMap2Dbuf, 0, heatMap2Dbuf.Length);
        Buffer.BlockCopy(heatMap2Dbuf, 0, heatMap2D, 0, heatMap2Dbuf.Length);
        Marshal.Copy((IntPtr)outputs[3].dataAddr(), offset2Dbuf, 0, offset2Dbuf.Length);
        Buffer.BlockCopy(offset2Dbuf, 0, offset2D, 0, offset2Dbuf.Length);
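        // For each joint, take the argmax over its HeatMapCol x HeatMapCol heatmap,
        // then refine that grid cell to continuous pixel coordinates with the offset maps.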
        for (var j = 0; j < JointNum; j++)
        {
            var maxXIndex = 0;
            var maxYIndex = 0;
            jointPoints[j].score2D = 0.0f;
            for (var y = 0; y < HeatMapCol; y++)
            {
                for (var x = 0; x < HeatMapCol; x++)
                {
                    var v = heatMap2D[(HeatMapCol_Squared) * j + HeatMapCol * y + x];

                    if (v > jointPoints[j].score2D)
                    {
                        jointPoints[j].score2D = v;
                        maxXIndex = x;
                        maxYIndex = y;
                    }
                }
            }

            jointPoints[j].Pos2D.x = (offset2D[HeatMapCol_Squared * j + HeatMapCol * maxYIndex + maxXIndex] + maxXIndex / (float)HeatMapCol) * (float)inputImageSize;
            jointPoints[j].Pos2D.y = (offset2D[HeatMapCol_Squared * (j + JointNum) + HeatMapCol * maxYIndex + maxXIndex] + maxYIndex / (float)HeatMapCol) * (float)inputImageSize;
        }

        // copy 3D outputs
        Marshal.Copy((IntPtr)outputs[0].dataAddr(), heatMap3Dbuf, 0, heatMap3Dbuf.Length);
        Buffer.BlockCopy(heatMap3Dbuf, 0, heatMap3D, 0, heatMap3Dbuf.Length);
        Marshal.Copy((IntPtr)outputs[1].dataAddr(), offset3Dbuf, 0, offset3Dbuf.Length);
        Buffer.BlockCopy(offset3Dbuf, 0, offset3D, 0, offset3Dbuf.Length);
        for (var j = 0; j < JointNum; j++)
        {
            var maxXIndex = 0;
            var maxYIndex = 0;
            var maxZIndex = 0;
            jointPoints[j].score3D = 0.0f;
            for (var z = 0; z < HeatMapCol; z++)
            {
                for (var y = 0; y < HeatMapCol; y++)
                {
                    for (var x = 0; x < HeatMapCol; x++)
                    {
                        float v = heatMap3D[HeatMapCol_Cube * j + HeatMapCol_Squared * z + HeatMapCol * y + x];
                        if (v > jointPoints[j].score3D)
                        {
                            jointPoints[j].score3D = v;
                            maxXIndex = x;
                            maxYIndex = y;
                            maxZIndex = z;
                        }
                    }
                }
            }

            jointPoints[j].Now3D.x = (offset3D[HeatMapCol_Cube * j + HeatMapCol_Squared * maxZIndex + HeatMapCol * maxYIndex + maxXIndex] + (float)maxXIndex / (float)HeatMapCol) * (float)inputImageSize;
            jointPoints[j].Now3D.y = (float)inputImageSize - (offset3D[HeatMapCol_Cube * (j + JointNum) + HeatMapCol_Squared * maxZIndex + HeatMapCol * maxYIndex + maxXIndex] + (float)maxYIndex / (float)HeatMapCol) * (float)inputImageSize;
            jointPoints[j].Now3D.z = (offset3D[HeatMapCol_Cube * (j + JointNum * 2) + HeatMapCol_Squared * maxZIndex + HeatMapCol * maxYIndex + maxXIndex] + (float)(maxZIndex - 7) / (float)HeatMapCol) * (float)inputImageSize;
        }

        // Calculate hip location
        var lc = (jointPoints[PositionIndex.rThighBend.Int()].Now3D + jointPoints[PositionIndex.lThighBend.Int()].Now3D) / 2f;

        jointPoints[PositionIndex.hip.Int()].Now3D = (jointPoints[PositionIndex.abdomenUpper.Int()].Now3D + lc) / 2f;
        // Calculate neck location
        jointPoints[PositionIndex.neck.Int()].Now3D = (jointPoints[PositionIndex.rShldrBend.Int()].Now3D + jointPoints[PositionIndex.lShldrBend.Int()].Now3D) / 2f;
        // Calculate head location
        var cEar = (jointPoints[PositionIndex.rEar.Int()].Now3D + jointPoints[PositionIndex.lEar.Int()].Now3D) / 2f;
        var hv   = cEar - jointPoints[PositionIndex.neck.Int()].Now3D;
        var nhv  = Vector3.Normalize(hv);
        var nv   = jointPoints[PositionIndex.Nose.Int()].Now3D - jointPoints[PositionIndex.neck.Int()].Now3D;

        jointPoints[PositionIndex.head.Int()].Now3D = jointPoints[PositionIndex.neck.Int()].Now3D + nhv * Vector3.Dot(nhv, nv);
        // Calculate spine location
        jointPoints[PositionIndex.spine.Int()].Now3D = jointPoints[PositionIndex.abdomenUpper.Int()].Now3D;

        // Low pass filter: exponential smoothing (alpha = 0.5) to reduce frame-to-frame jitter.
        foreach (var jp in jointPoints)
        {
            jp.Pos3D     = jp.PrevPos3D * 0.5f + jp.Now3D * 0.5f;
            jp.PrevPos3D = jp.Pos3D;
        }
    }
Example No. 29
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            if (!string.IsNullOrEmpty(classes))
            {
                classNames = readClassNames(classes_filepath);
                if (classNames == null)
                {
                    Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                }
            }
            else if (classesList.Count > 0)
            {
                classNames = classesList;
            }

            Mat img = Imgcodecs.imread(input_filepath);

            if (img.empty())
            {
                Debug.LogError(input_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }


            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
            {
                Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNet(model_filepath, config_filepath);
                //! [Initialize network]
            }


            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                outBlobNames = getOutputsNames(net);
                //for (int i = 0; i < outBlobNames.Count; i++)
                //{
                //    Debug.Log("names [" + i + "] " + outBlobNames[i]);
                //}

                outBlobTypes = getOutputsTypes(net);
                //for (int i = 0; i < outBlobTypes.Count; i++)
                //{
                //    Debug.Log("types [" + i + "] " + outBlobTypes[i]);
                //}


                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                        inpHeight > 0 ? inpHeight : img.rows());
                Mat blob = Dnn.blobFromImage(img, scale, inpSize, mean, swapRB, false);


                // Run a model.
                net.setInput(blob);

                if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)       // Faster-RCNN or R-FCN
                {
                    Imgproc.resize(img, img, inpSize);
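                    // These models also expect an "im_info" input: input height, width,
                    // and an image scale factor.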
                    Mat imInfo = new Mat(1, 3, CvType.CV_32FC1);
                    imInfo.put(0, 0, new float[] {
                        (float)inpSize.height,
                        (float)inpSize.width,
                        1.6f
                    });
                    net.setInput(imInfo, "im_info");
                }


                TickMeter tm = new TickMeter();
                tm.start();


                List <Mat> outs = new List <Mat>();
                net.forward(outs, outBlobNames);


                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());


                postprocess(img, outs, net);

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
                net.Dispose();
            }


            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
Example No. 30
        // Use this for initialization
        void Start()
        {
            // If true, the error log of the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);


            Mat img = Imgcodecs.imread(Utils.getFilePath("dnn/004545.jpg"));

            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("dnn/004545.jpg is not loaded.The image file can be downloaded here: \"https://github.com/chuanqi305/MobileNet-SSD/blob/master/images/004545.jpg\".Please copy to \"Assets/StreamingAssets/dnn/\" folder. ");
                img = new Mat(375, 500, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }
            #endif

            Size inVideoSize = new Size(img.width(), img.height());
            Size cropSize;
            if (inVideoSize.width / (float)inVideoSize.height > WHRatio)
            {
                cropSize = new Size(inVideoSize.height * WHRatio, inVideoSize.height);
            }
            else
            {
                cropSize = new Size(inVideoSize.width, inVideoSize.width / WHRatio);
            }
            OpenCVForUnity.Rect crop = new OpenCVForUnity.Rect(new Point((inVideoSize.width - cropSize.width) / 2, (inVideoSize.height - cropSize.height) / 2), cropSize);


            Net net = null;

            string model_filepath    = Utils.getFilePath("dnn/MobileNetSSD_deploy.caffemodel");
            string prototxt_filepath = Utils.getFilePath("dnn/MobileNetSSD_deploy.prototxt");

            if (string.IsNullOrEmpty(model_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError("model file is not loaded.The model and prototxt file can be downloaded here: \"https://github.com/chuanqi305/MobileNet-SSD\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
            }
            else
            {
                net = Dnn.readNetFromCaffe(prototxt_filepath, model_filepath);
            }

            if (net == null)
            {
                img = new Mat(img, crop);

                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                Mat blob = Dnn.blobFromImage(img, inScaleFactor, new Size(inWidth, inHeight), new Scalar(meanVal), false, true);

                net.setInput(blob);


                TickMeter tm = new TickMeter();
                tm.start();

                Mat prob = net.forward();
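                // The SSD output is a 1x1xNx7 blob; reshape it so each row is one detection:
                // [image_id, class_id, confidence, left, top, right, bottom], coordinates normalized to [0, 1].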
                prob = prob.reshape(1, (int)prob.total() / 7);

                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());


                img = new Mat(img, crop);

                float[] data = new float[7];

                float confidenceThreshold = 0.2f;
                for (int i = 0; i < prob.rows(); i++)
                {
                    prob.get(i, 0, data);

                    float confidence = data [2];

                    if (confidence > confidenceThreshold)
                    {
                        int class_id = (int)(data [1]);

                        float xLeftBottom = data [3] * img.cols();
                        float yLeftBottom = data [4] * img.rows();
                        float xRightTop   = data [5] * img.cols();
                        float yRightTop   = data [6] * img.rows();

                        Debug.Log("class_id: " + class_id);
                        Debug.Log("Confidence: " + confidence);

                        Debug.Log(" " + xLeftBottom
                                  + " " + yLeftBottom
                                  + " " + xRightTop
                                  + " " + yRightTop);

                        Imgproc.rectangle(img, new Point(xLeftBottom, yLeftBottom), new Point(xRightTop, yRightTop),
                                          new Scalar(0, 255, 0), 2);
                        string label     = classNames [class_id] + ": " + confidence;
                        int[]  baseLine  = new int[1];
                        Size   labelSize = Imgproc.getTextSize(label, Core.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

                        Imgproc.rectangle(img, new Point(xLeftBottom, yLeftBottom),
                                          new Point(xLeftBottom + labelSize.width, yLeftBottom + labelSize.height + baseLine [0]),
                                          new Scalar(255, 255, 255), Core.FILLED);
                        Imgproc.putText(img, label, new Point(xLeftBottom, yLeftBottom + labelSize.height),
                                        Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0));
                    }
                }

                prob.Dispose();
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }