    // Use this for initialization
    void Run()
    {
        // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
        Utils.setDebugMode(false);

        if (!string.IsNullOrEmpty(classes))
        {
            classNames = readClassNames(classes_filepath);
            if (classNames == null)
            {
                Debug.LogError(classes_filepath +
                               " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
        }
        else if (classesList.Count > 0)
        {
            classNames = classesList;
        }

        if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
        {
            Debug.LogError(config_filepath + " or " + model_filepath +
                           " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        }
        else
        {
            //! [Initialize network]
            net = Dnn.readNet(model_filepath, config_filepath);
            //! [Initialize network]

            outBlobNames = getOutputsNames(net);
            outBlobTypes = getOutputsTypes(net);
        }
    }
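
These snippets call getOutputsNames(net), which the listing never shows. A minimal sketch, assuming the standard OpenCV dnn Java-style bindings that OpenCVForUnity mirrors (getUnconnectedOutLayers / getLayerNames): it maps the indices of the unconnected output layers to their layer names.

    protected virtual List<string> getOutputsNames(Net net)
    {
        List<string> names = new List<string>();

        // Indices of the output layers (layers with unconnected outputs).
        MatOfInt outLayers = net.getUnconnectedOutLayers();
        List<string> layersNames = net.getLayerNames();

        for (int i = 0; i < outLayers.total(); i++)
        {
            // Layer indices from getUnconnectedOutLayers() are 1-based.
            names.Add(layersNames[(int)outLayers.get(i, 0)[0] - 1]);
        }
        outLayers.Dispose();

        return names;
    }
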
    protected virtual void SetupDnn()
    {
        if (classesList.Count > 0)
        {
            classNames = classesList;
        }

        if (string.IsNullOrEmpty(model_filepath))
        {
            Debug.LogError(model_filepath + " is not loaded.");
            return;
        }

        net = Dnn.readNet(model_filepath);


        outBlobNames = getOutputsNames(net);
        //for (int i = 0; i < outBlobNames.Count; i++)
        //{
        //    Debug.Log("names [" + i + "] " + outBlobNames[i]);
        //}

        outBlobTypes = getOutputsTypes(net);
        //for (int i = 0; i < outBlobTypes.Count; i++)
        //{
        //    Debug.Log("types [" + i + "] " + outBlobTypes[i]);
        //}
    }
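
Likewise, getOutputsTypes(net) is not shown. A minimal sketch under the same assumptions, returning the registered type string (e.g. "DetectionOutput", "Region") of each output layer:

    protected virtual List<string> getOutputsTypes(Net net)
    {
        List<string> types = new List<string>();

        MatOfInt outLayers = net.getUnconnectedOutLayers();
        for (int i = 0; i < outLayers.total(); i++)
        {
            // Look the layer up by index and read its type name.
            types.Add(net.getLayer(new DictValue((int)outLayers.get(i, 0)[0])).get_type());
        }
        outLayers.Dispose();

        return types;
    }
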
Example #3
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            if (!string.IsNullOrEmpty(classes))
            {
                classNames = readClassNames(classes_filepath);
                if (classNames == null)
                {
                    Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                }
            }
            else if (classesList.Count > 0)
            {
                classNames = classesList;
            }

            if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
            {
                Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNet(model_filepath, config_filepath);
                //! [Initialize network]


                outBlobNames = getOutputsNames(net);
                //for (int i = 0; i < outBlobNames.Count; i++)
                //{
                //    Debug.Log("names [" + i + "] " + outBlobNames[i]);
                //}

                outBlobTypes = getOutputsTypes(net);
                //for (int i = 0; i < outBlobTypes.Count; i++)
                //{
                //    Debug.Log("types [" + i + "] " + outBlobTypes[i]);
                //}
            }


#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low-light issue that occurs on only some Android devices (e.g. Google Pixel, Pixel 2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();
            cursorObject.GetComponent <Cursor>().SetisTrigger(true);
            vocIDList = new List <int>();
        }
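
readClassNames(classes_filepath) is another helper the examples assume. A minimal sketch: it reads one class label per line from a text file and returns null on failure, which is what the null check above tests for.

        private List<string> readClassNames(string filename)
        {
            List<string> classNames = new List<string>();

            System.IO.StreamReader cReader = null;
            try
            {
                cReader = new System.IO.StreamReader(filename, System.Text.Encoding.Default);
                while (!cReader.EndOfStream)
                {
                    // One class name per line.
                    classNames.Add(cReader.ReadLine());
                }
            }
            catch (System.Exception ex)
            {
                Debug.LogError(ex.Message);
                return null;
            }
            finally
            {
                if (cReader != null)
                    cReader.Close();
            }

            return classNames;
        }
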
    protected virtual void SetupDnn()
    {
        if (!string.IsNullOrEmpty(classes))
        {
            classNames = readClassNames(classes_filepath);
            if (classNames == null)
            {
                Debug.LogError(classes_filepath + " is not loaded.");
            }
        }
        else if (classesList.Count > 0)
        {
            classNames = classesList;
        }

        if (!string.IsNullOrEmpty(config_filepath) && !string.IsNullOrEmpty(model_filepath))
        {
            net = Dnn.readNet(model_filepath, config_filepath);
        }
        else if (!string.IsNullOrEmpty(model_filepath))
        {
            net = Dnn.readNet(model_filepath);
        }
        else
        {
            Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded.");
            return;
        }


        outBlobNames = getOutputsNames(net);
        //for (int i = 0; i < outBlobNames.Count; i++)
        //{
        //    Debug.Log("names [" + i + "] " + outBlobNames[i]);
        //}

        outBlobTypes = getOutputsTypes(net);
        //for (int i = 0; i < outBlobTypes.Count; i++)
        //{
        //    Debug.Log("types [" + i + "] " + outBlobTypes[i]);
        //}
    }
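
Note how this variant falls back to the single-argument overload when only a model file is available: Dnn.readNet infers the framework from the model file's extension, and the config argument is only needed for formats that keep the graph in a separate file. A short sketch with hypothetical paths:

    // readNet picks the importer from the extension; ONNX and frozen
    // TensorFlow graphs need no separate config file.
    Net onnxNet = Dnn.readNet("model.onnx");                    // hypothetical path
    Net darknetNet = Dnn.readNet("yolo.weights", "yolo.cfg");   // hypothetical paths
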
Example #5
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            if (!string.IsNullOrEmpty(classes))
            {
                classNames = readClassNames(classes_filepath);
                if (classNames == null)
                {
                    Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                }
            }
            else if (classesList.Count > 0)
            {
                classNames = classesList;
            }

            Mat img = Imgcodecs.imread(input_filepath);

            if (img.empty())
            {
                Debug.LogError(input_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }


            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
            {
                Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNet(model_filepath, config_filepath);
                //! [Initialize network]
            }


            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                outBlobNames = getOutputsNames(net);
                //for (int i = 0; i < outBlobNames.Count; i++)
                //{
                //    Debug.Log("names [" + i + "] " + outBlobNames[i]);
                //}

                outBlobTypes = getOutputsTypes(net);
                //for (int i = 0; i < outBlobTypes.Count; i++)
                //{
                //    Debug.Log("types [" + i + "] " + outBlobTypes[i]);
                //}


                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                        inpHeight > 0 ? inpHeight : img.rows());
                Mat blob = Dnn.blobFromImage(img, scale, inpSize, mean, swapRB, false);


                // Run a model.
                net.setInput(blob);

                if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)
                {  // Faster-RCNN or R-FCN
                    Imgproc.resize(img, img, inpSize);
                    Mat imInfo = new Mat(1, 3, CvType.CV_32FC1);
                    imInfo.put(0, 0, new float[] {
                        (float)inpSize.height,
                        (float)inpSize.width,
                        1.6f
                    });
                    net.setInput(imInfo, "im_info");
                }


                TickMeter tm = new TickMeter();
                tm.start();


                List <Mat> outs = new List <Mat>();
                net.forward(outs, outBlobNames);


                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());


                postprocess(img, outs, net);

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
                net.Dispose();
            }


            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
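
The call to postprocess(img, outs, net) is not expanded anywhere in this listing. A condensed sketch for the SSD-style "DetectionOutput" case only, assuming the confThreshold field and the outBlobTypes list populated above; each detection row is [batchId, classId, confidence, left, top, right, bottom] with coordinates relative to the frame:

        protected virtual void postprocess(Mat frame, List<Mat> outs, Net net)
        {
            string outLayerType = outBlobTypes[0];

            if (outLayerType == "DetectionOutput")
            {
                foreach (Mat outMat in outs)
                {
                    // The network produces a 1x1xNx7 blob; flatten it to N rows of 7 values.
                    Mat detections = outMat.reshape(1, (int)outMat.total() / 7);

                    float[] row = new float[7];
                    for (int i = 0; i < detections.rows(); i++)
                    {
                        detections.get(i, 0, row);
                        float confidence = row[2];
                        if (confidence > confThreshold)
                        {
                            int left = (int)(row[3] * frame.cols());
                            int top = (int)(row[4] * frame.rows());
                            int right = (int)(row[5] * frame.cols());
                            int bottom = (int)(row[6] * frame.rows());
                            Imgproc.rectangle(frame, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0), 2);
                        }
                    }
                    detections.Dispose();
                }
            }
            // The real samples also handle "Region" (YOLO) outputs and apply NMS.
        }
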
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            if (!string.IsNullOrEmpty(classes))
            {
                classNames = readClassNames(classes_filepath);
#if !UNITY_WSA_10_0
                if (classNames == null)
                {
                    Debug.LogError("class names list file is not loaded.The model and class names list can be downloaded here: \"https://github.com/pjreddie/darknet/tree/master/data/coco.names\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
                }
#endif
            }
            else if (classesList.Count > 0)
            {
                classNames = classesList;
            }

            if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
            {
                Debug.LogError("model file is not loaded. the cfg-file and weights-file can be downloaded here: https://github.com/pjreddie/darknet/blob/master/cfg/tiny-yolo.cfg and https://pjreddie.com/media/files/tiny-yolo.weights. Please copy to “Assets/StreamingAssets/dnn/” folder. ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNet(model_filepath, config_filepath);
                //! [Initialize network]


                outBlobNames = getOutputsNames(net);
                //                for (int i = 0; i < outBlobNames.Count; i++) {
                //                    Debug.Log ("names [" + i + "] " + outBlobNames [i]);
                //                }

                outBlobTypes = getOutputsTypes(net);
                //                for (int i = 0; i < outBlobTypes.Count; i++) {
                //                    Debug.Log ("types [" + i + "] " + outBlobTypes [i]);
                //                }
            }


            #if UNITY_ANDROID && !UNITY_EDITOR
            // Set the requestedFPS parameter to avoid the WebCamTexture image becoming low light on some Android devices (e.g. Pixel, Pixel 2).
            // https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
            // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
            rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
            if (webCamTextureToMatHelper.requestedIsFrontFacing)
            {
                webCamTextureToMatHelper.requestedFPS = 15;
                webCamTextureToMatHelper.Initialize();
            }
            else
            {
                webCamTextureToMatHelper.Initialize();
            }
            #else
            webCamTextureToMatHelper.Initialize();
            #endif
        }
Example #7
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }


            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net net = null;

            if (string.IsNullOrEmpty(caffemodel_filepath) || string.IsNullOrEmpty(prototxt_filepath))
            {
                Debug.LogError(caffemodel_filepath + " or " + prototxt_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                net = Dnn.readNet(prototxt_filepath, caffemodel_filepath);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                float frameWidth  = img.cols();
                float frameHeight = img.rows();

                Mat input = Dnn.blobFromImage(img, inScale, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);

                net.setInput(input);

                //TickMeter tm = new TickMeter ();
                //tm.start ();

                Mat output = net.forward();

                //tm.stop ();
                //Debug.Log ("Inference time, ms: " + tm.getTimeMilli ());

                //Debug.Log("output.size(0) " + output.size(0));
                //Debug.Log("output.size(1) " + output.size(1));
                //Debug.Log("output.size(2) " + output.size(2));
                //Debug.Log("output.size(3) " + output.size(3));

                float[] data = new float[output.size(2) * output.size(3)];

                output = output.reshape(1, output.size(1));

                List <Point> points = new List <Point>();
                for (int i = 0; i < BODY_PARTS.Count; i++)
                {
                    output.get(i, 0, data);

                    Mat heatMap = new Mat(1, data.Length, CvType.CV_32FC1);
                    heatMap.put(0, 0, data);


                    //Originally, we would find all the local maxima. To simplify the
                    //sample, we just find the global one; as a result, only a single
                    //pose can be detected at a time this way.
                    Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);

                    heatMap.Dispose();


                    // maxLoc.x is a linear index into the flattened 46x46 heatmap;
                    // use the integer row index for y.
                    double x = (frameWidth * (result.maxLoc.x % 46)) / 46;
                    double y = (frameHeight * (int)(result.maxLoc.x / 46)) / 46;

                    if (result.maxVal > 0.1)
                    {
                        points.Add(new Point(x, y));
                    }
                    else
                    {
                        points.Add(null);
                    }
                }

                for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
                {
                    string partFrom = POSE_PAIRS[i, 0];
                    string partTo   = POSE_PAIRS[i, 1];

                    int idFrom = BODY_PARTS[partFrom];
                    int idTo   = BODY_PARTS[partTo];

                    if (points[idFrom] != null && points[idTo] != null)
                    {
                        Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                        Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                        Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                    }
                }

                MatOfDouble timings = new MatOfDouble();
                long        t       = net.getPerfProfile(timings);
                Debug.Log("t: " + t);
                Debug.Log("timings.dump(): " + timings.dump());

                double freq = Core.getTickFrequency() / 1000;
                Debug.Log("freq: " + freq);

                Imgproc.putText(img, (t / freq) + "ms", new Point(10, img.height() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.6, new Scalar(0, 0, 255), 2);
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);


            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
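
The pose example above indexes BODY_PARTS and POSE_PAIRS, which are declared elsewhere in the class. A sketch assuming the MPI human-pose model (15 parts plus background, 46x46 heatmaps for a 368x368 input), mirroring the tables in OpenCV's openpose sample:

        // Maps each body part name to its heatmap channel index (MPI model).
        readonly Dictionary<string, int> BODY_PARTS = new Dictionary<string, int>() {
            { "Head", 0 }, { "Neck", 1 }, { "RShoulder", 2 }, { "RElbow", 3 },
            { "RWrist", 4 }, { "LShoulder", 5 }, { "LElbow", 6 }, { "LWrist", 7 },
            { "RHip", 8 }, { "RKnee", 9 }, { "RAnkle", 10 }, { "LHip", 11 },
            { "LKnee", 12 }, { "LAnkle", 13 }, { "Chest", 14 }, { "Background", 15 }
        };

        // Pairs of parts to connect with limb lines when both are detected.
        readonly string[,] POSE_PAIRS = new string[,] {
            { "Head", "Neck" }, { "Neck", "RShoulder" }, { "RShoulder", "RElbow" },
            { "RElbow", "RWrist" }, { "Neck", "LShoulder" }, { "LShoulder", "LElbow" },
            { "LElbow", "LWrist" }, { "Neck", "Chest" }, { "Chest", "RHip" },
            { "RHip", "RKnee" }, { "RKnee", "RAnkle" }, { "Chest", "LHip" },
            { "LHip", "LKnee" }, { "LKnee", "LAnkle" }
        };
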
        // Use this for initialization
        void Run()
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image_filepath, Imgcodecs.IMREAD_COLOR);

            if (img.empty())
            {
                Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                img = new Mat(368, 368, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }

            // Adjust Quad.transform.localScale.
            gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float imageWidth  = img.width();
            float imageHeight = img.height();

            float widthScale  = (float)Screen.width / imageWidth;
            float heightScale = (float)Screen.height / imageHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = imageHeight / 2;
            }


            Net detector   = null;
            Net recognizer = null;

            if (string.IsNullOrEmpty(detectionmodel_filepath) || string.IsNullOrEmpty(recognitionmodel_filepath))
            {
                Debug.LogError(detectionmodel_filepath + " or " + recognitionmodel_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                detector   = Dnn.readNet(detectionmodel_filepath);
                recognizer = Dnn.readNet(recognitionmodel_filepath);
            }

            if (detector == null || recognizer == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                TickMeter tickMeter = new TickMeter();

                List <Mat>    outs     = new List <Mat>();
                List <string> outNames = new List <string>();
                outNames.Add("feature_fusion/Conv_7/Sigmoid");
                outNames.Add("feature_fusion/concat_3");

                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(), inpHeight > 0 ? inpHeight : img.rows());
                Mat  blob    = Dnn.blobFromImage(img, 1.0, inpSize, new Scalar(123.68, 116.78, 103.94), true, false); // blobFromImage(frame, blob, 1.0, Size(inpWidth, inpHeight), Scalar(123.68, 116.78, 103.94), true, false);

                // Run detection model.
                detector.setInput(blob);
                tickMeter.start();
                detector.forward(outs, outNames);
                tickMeter.stop();

                Mat scores   = outs[0];
                Mat geometry = outs[1];

                // Decode predicted bounding boxes.
                List <RotatedRect> boxes       = new List <RotatedRect>();
                List <float>       confidences = new List <float>();
                decodeBoundingBoxes(scores, geometry, confThreshold, boxes, confidences);


                // Apply non-maximum suppression procedure.
                MatOfRotatedRect boxesMat       = new MatOfRotatedRect(boxes.ToArray());
                MatOfFloat       confidencesMat = new MatOfFloat(confidences.ToArray());
                MatOfInt         indicesMat     = new MatOfInt();
                Dnn.NMSBoxesRotated(boxesMat, confidencesMat, confThreshold, nmsThreshold, indicesMat);

                List <int> indices = indicesMat.toList();
                // Cast to avoid integer division if inpWidth/inpHeight are declared as ints.
                Point      ratio   = new Point((double)img.cols() / inpWidth, (double)img.rows() / inpHeight);

                // Render text.
                for (int i = 0; i < indices.Count; ++i)
                {
                    RotatedRect box = boxes[indices[i]];

                    Point[] vertices = new Point[4];
                    box.points(vertices);

                    for (int j = 0; j < 4; ++j)
                    {
                        vertices[j].x *= ratio.x;
                        vertices[j].y *= ratio.y;
                    }

                    for (int j = 0; j < 4; ++j)
                    {
                        Imgproc.line(img, vertices[j], vertices[(j + 1) % 4], new Scalar(0, 255, 0), 1);
                    }

                    if (recognizer != null)
                    {
                        Mat cropped = new Mat();
                        fourPointsTransform(img, vertices, cropped);

                        //Debug.Log(cropped);

                        Imgproc.cvtColor(cropped, cropped, Imgproc.COLOR_BGR2GRAY);

                        Mat blobCrop = Dnn.blobFromImage(cropped, 1.0 / 127.5, new Size(), Scalar.all(127.5));
                        recognizer.setInput(blobCrop);

                        //Debug.Log(blobCrop);

                        tickMeter.start();
                        Mat result = recognizer.forward();
                        tickMeter.stop();

                        string wordRecognized;
                        decodeText(result, out wordRecognized);
                        Imgproc.putText(img, wordRecognized, vertices[1], Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 0, 0), 1, Imgproc.LINE_AA, false);

                        Debug.Log(wordRecognized);


                        cropped.Dispose();
                        blobCrop.Dispose();
                        result.Dispose();
                    }
                }

                Debug.Log("Inference time, ms: " + tickMeter.getTimeMilli());

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
                detector.Dispose();
                recognizer.Dispose();
            }

            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);

            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(img, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
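
Two helpers in the text example are left out of the listing: fourPointsTransform, which rectifies a detected quad into an axis-aligned crop, and decodeText, which greedily decodes the recognizer's CTC output. Minimal sketches, assuming the 100x32 crop size and the 36-character alphabet (blank at index 0) used by the OpenCV text samples:

        void fourPointsTransform(Mat frame, Point[] vertices, Mat result)
        {
            Size outputSize = new Size(100, 32);

            // Destination corners: bottom-left, top-left, top-right, bottom-right,
            // matching the vertex order returned by RotatedRect.points().
            Point[] targetVertices = new Point[] {
                new Point(0, outputSize.height - 1),
                new Point(0, 0),
                new Point(outputSize.width - 1, 0),
                new Point(outputSize.width - 1, outputSize.height - 1)
            };

            using (MatOfPoint2f src = new MatOfPoint2f(vertices))
            using (MatOfPoint2f dst = new MatOfPoint2f(targetVertices))
            using (Mat warpMat = Imgproc.getPerspectiveTransform(src, dst))
            {
                Imgproc.warpPerspective(frame, result, warpMat, outputSize);
            }
        }

        void decodeText(Mat scores, out string text)
        {
            // CTC greedy decoding: argmax per time step, collapse repeats, drop blanks.
            string alphabet = "0123456789abcdefghijklmnopqrstuvwxyz";
            text = "";

            Mat scoresMat = scores.reshape(1, scores.size(0));
            char lastChar = '-';
            for (int i = 0; i < scoresMat.rows(); i++)
            {
                Core.MinMaxLocResult result = Core.minMaxLoc(scoresMat.row(i));
                int c = (int)result.maxLoc.x;
                if (c > 0)
                {
                    char currentChar = alphabet[c - 1];
                    if (currentChar != lastChar)
                        text += currentChar;
                    lastChar = currentChar;
                }
                else
                {
                    lastChar = '-';
                }
            }
            scoresMat.Dispose();
        }
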
        // Use this for initialization
        void Run(Mat img)
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            if (!string.IsNullOrEmpty(classes))
            {
                classNames = readClassNames(classes_filepath);
#if !UNITY_WSA_10_0
                if (classNames == null)
                {
                    Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
                }
#endif
            }
            else if (classesList.Count > 0)
            {
                classNames = classesList;
            }



            Net net = null;

            if (string.IsNullOrEmpty(config_filepath) || string.IsNullOrEmpty(model_filepath))
            {
                Debug.LogError(config_filepath + " or " + model_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
            }
            else
            {
                //! [Initialize network]
                net = Dnn.readNet(model_filepath, config_filepath);
                //! [Initialize network]
            }


            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                outBlobNames = getOutputsNames(net);
                //                for (int i = 0; i < outBlobNames.Count; i++) {
                //                    Debug.Log ("names [" + i + "] " + outBlobNames [i]);
                //                }

                outBlobTypes = getOutputsTypes(net);
                //                for (int i = 0; i < outBlobTypes.Count; i++) {
                //                    Debug.Log ("types [" + i + "] " + outBlobTypes [i]);
                //                }


                // Create a 4D blob from a frame.
                Size inpSize = new Size(inpWidth > 0 ? inpWidth : img.cols(),
                                        inpHeight > 0 ? inpHeight : img.rows());
                Mat blob = Dnn.blobFromImage(img, scale, inpSize, mean, swapRB, false);


                // Run a model.
                net.setInput(blob);

                if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)
                {  // Faster-RCNN or R-FCN
                    Imgproc.resize(img, img, inpSize);
                    Mat imInfo = new Mat(1, 3, CvType.CV_32FC1);
                    imInfo.put(0, 0, new float[] {
                        (float)inpSize.height,
                        (float)inpSize.width,
                        1.6f
                    });
                    net.setInput(imInfo, "im_info");
                }


                TickMeter tm = new TickMeter();
                tm.start();


                List <Mat> outs = new List <Mat>();
                net.forward(outs, outBlobNames);


                tm.stop();
                Debug.Log("Inference time, ms: " + tm.getTimeMilli());


                postprocess(img, outs, net);

                for (int i = 0; i < outs.Count; i++)
                {
                    outs[i].Dispose();
                }
                blob.Dispose();
                net.Dispose();
            }


            Utils.setDebugMode(false);
        }
Example #10
        void ObjectDetection()
        {
            // If true, the error log of the native-side OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Mat img = Imgcodecs.imread(image);

            if (img.empty())
            {
                Debug.LogError("Image " + image + " is not loaded.");
                img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
            }


            Net net = null;

            if (string.IsNullOrEmpty(cfg) || string.IsNullOrEmpty(weight))
            {
                Debug.LogError(cfg + " or " + weight + " is not loaded.");
            }
            else
            {
                // Load the model and config.
                net = Dnn.readNet(weight, cfg);
            }

            if (net == null)
            {
                Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
            }
            else
            {
                // Set up the input blob; the input size can be 320/416/608.
                // OpenCV blob settings are documented here: https://github.com/opencv/opencv/tree/master/samples/dnn#object-detection
                Mat blob = Dnn.blobFromImage(img, 1.0 / 255, new Size(416, 416), new Scalar(0), false, false);

                // Set the input data.
                net.setInput(blob);

                // Get the output layer names.
                List <string> outNames = net.getUnconnectedOutLayersNames();
                // Create a Mat for each output layer.
                List <Mat> outs = outNames.Select(_ => new Mat()).ToList();

                #region forward model
                TickMeter tm = new TickMeter();
                tm.start();

                net.forward(outs, outNames);

                tm.stop();
                Debug.Log("Runtime: " + tm.getTimeMilli() + " ms");
                #endregion

                // Get results from all outputs.
                GetResult(outs, img, threshold, nmsThreshold);
            }

            // Show Image
            Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
            Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
            Utils.matToTexture2D(img, texture);
            gameObject.GetComponent <Renderer>().material.mainTexture = texture;
            Utils.setDebugMode(false);
        }
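
GetResult(outs, img, threshold, nmsThreshold) is defined elsewhere in that class. A condensed sketch of typical YOLO postprocessing (assumed, not the author's exact code): keep boxes whose objectness clears the threshold, run non-maximum suppression, then draw the survivors. A fuller version would also multiply objectness by the best class score and label each box with classNames.

        void GetResult(List<Mat> outs, Mat img, float threshold, float nmsThreshold)
        {
            List<Rect2d> boxes = new List<Rect2d>();
            List<float> confidences = new List<float>();

            foreach (Mat outMat in outs)
            {
                // Each row: [centerX, centerY, width, height, objectness, class scores...],
                // with coordinates relative to the input image.
                float[] row = new float[outMat.cols()];
                for (int i = 0; i < outMat.rows(); i++)
                {
                    outMat.get(i, 0, row);
                    float objectness = row[4];
                    if (objectness > threshold)
                    {
                        double w = row[2] * img.cols();
                        double h = row[3] * img.rows();
                        double left = row[0] * img.cols() - w / 2;
                        double top = row[1] * img.rows() - h / 2;
                        boxes.Add(new Rect2d(left, top, w, h));
                        confidences.Add(objectness);
                    }
                }
            }

            // Non-maximum suppression across all candidate boxes.
            MatOfRect2d boxesMat = new MatOfRect2d(boxes.ToArray());
            MatOfFloat confidencesMat = new MatOfFloat(confidences.ToArray());
            MatOfInt indicesMat = new MatOfInt();
            Dnn.NMSBoxes(boxesMat, confidencesMat, threshold, nmsThreshold, indicesMat);

            foreach (int idx in indicesMat.toList())
            {
                Rect2d box = boxes[idx];
                Imgproc.rectangle(img, box.tl(), box.br(), new Scalar(0, 255, 0), 2);
            }

            boxesMat.Dispose();
            confidencesMat.Dispose();
            indicesMat.Dispose();
        }
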