// Unity lifecycle entry point: resolves the TensorFlow graph path, starts the
// webcam helper, loads the network, and fits the orthographic camera so the
// camera frame fills the screen while preserving its aspect ratio.
void Start()
{
    // Resolve the model file bundled under StreamingAssets.
    graph_filepath = Utils.getFilePath("dnn/graph1.pb");

    webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();
    // NOTE(review): GetMat() is called immediately after Initialize(); this
    // assumes the helper exposes a valid frame Mat synchronously here — confirm,
    // since this helper's initialization is typically completed via a callback.
    Mat img = webCamTextureToMatHelper.GetMat();

    // Only attempt to load the network when the file was actually found.
    if (!string.IsNullOrEmpty(graph_filepath))
    {
        net = Dnn.readNetFromTensorflow(graph_filepath);
    }
    if (net == null)
    {
        // Surface the load failure directly on the displayed frame.
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }

    // Fit the orthographic camera: use the dimension that would overflow the
    // screen to derive the half-height the camera must show.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }
}
// Use this for initialization
// Loads the Inception model and its class-label list, then starts the webcam
// helper. Load-failure checks are compiled out on UWP (WSA 10.0) builds.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    // Load the TensorFlow graph from its resolved StreamingAssets path.
    net = Dnn.readNetFromTensorflow(tensorflow_inception_graph_pb_filepath);
#if !UNITY_WSA_10_0
    if (net.empty())
    {
        Debug.LogError("model file is not loaded. The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\". Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

    // Load the human-readable class labels that pair with the model outputs.
    classes = readClassNames(imagenet_comp_graph_label_strings_txt_filepath);
#if !UNITY_WSA_10_0
    if (classes == null)
    {
        Debug.LogError("class names list file is not loaded. The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\". Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
    // Start the webcam capture helper.
    webCamTextureToMatHelper.Initialize();
}
// Start is called before the first frame update
// Sets up the MNIST demo: finds the answer text UI object, loads the dataset
// and the TensorFlow model, then shows and classifies the first sample.
void Start()
{
    // UI element used to display the prediction result.
    answerText = GameObject.Find("Text");

    // Load the bundled MNIST samples, starting at the first one.
    mnist_dataset_idx = 0;
    mnist_dataset = read_mnist_dataset();

    // Load the TensorFlow model directly from its StreamingAssets path.
    net = Dnn.readNetFromTensorflow(Utils.getFilePath(MODEL_FILE_PATH));
    if (net.empty())
    {
        Debug.LogError("model file is not loaded.");
    }

    // Display the current sample and run inference on it.
    show_image();
    predict();
}
// Use this for initialization
// Loads the Inception model and its class-label list, then starts the webcam
// helper. Load-failure checks are compiled out on UWP (WSA 10.0) builds.
void Start()
{
    // Load the TensorFlow graph from StreamingAssets.
    net = Dnn.readNetFromTensorflow(Utils.getFilePath("dnn/tensorflow_inception_graph.pb"));
#if !UNITY_WSA_10_0
    if (net.empty())
    {
        Debug.LogError("model file is not loaded.The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

    // Load the human-readable class labels that pair with the model outputs.
    classes = readClassNames(Utils.getFilePath("dnn/imagenet_comp_graph_label_strings.txt"));
#if !UNITY_WSA_10_0
    if (classes == null)
    {
        Debug.LogError("class names list file is not loaded.The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

    // Start the webcam capture helper.
    webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
    webCamTextureToMatHelper.Initialize();
}
// Use this for initialization
// Loads the Inception model and class labels, then starts the webcam helper,
// applying a reduced-FPS workaround for front cameras on Android devices.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    // Load the TensorFlow graph from its resolved StreamingAssets path.
    net = Dnn.readNetFromTensorflow(tensorflow_inception_graph_pb_filepath);
#if !UNITY_WSA_10_0
    if (net.empty())
    {
        Debug.LogError("model file is not loaded.The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

    // Load the human-readable class labels that pair with the model outputs.
    classes = readClassNames(imagenet_comp_graph_label_strings_txt_filepath);
#if !UNITY_WSA_10_0
    if (classes == null)
    {
        Debug.LogError("class names list file is not loaded.The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

#if UNITY_ANDROID && !UNITY_EDITOR
    // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, pixel 2)
    // https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
    // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
    // Remember the rear-camera FPS so it can be restored elsewhere.
    rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
    if (webCamTextureToMatHelper.requestedIsFrontFacing)
    {
        // Front camera: lower the FPS before initializing (see links above).
        webCamTextureToMatHelper.requestedFPS = 15;
        webCamTextureToMatHelper.Initialize();
    }
    else
    {
        webCamTextureToMatHelper.Initialize();
    }
#else
    webCamTextureToMatHelper.Initialize();
#endif
}
// Loads the hand detector selected by handDetectionMode:
//   0 -> Haar cascade classifier (palm.xml)
//   2 -> TensorFlow object-detection graph (frozen_inference_graph.pb/.pbtxt)
// NOTE(review): other mode values intentionally load nothing here — confirm
// they are handled elsewhere.
private void LoadDetector()
{
    if (handDetectionMode == 0)
    {
        var cascadeFileName = Utils.getFilePath("palm.xml");
        cascadeDetector = new CascadeClassifier();
        cascadeDetector.load(cascadeFileName);
        if (cascadeDetector.empty())
        {
            Debug.LogError("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        }
    }
    else if (handDetectionMode == 2)
    {
        var modelPath = Utils.getFilePath("frozen_inference_graph.pb");
        var configPath = Utils.getFilePath("frozen_inference_graph.pbtxt");
        tfDetector = Dnn.readNetFromTensorflow(modelPath, configPath);
        if (tfDetector.empty())
        {
            // Fix: an empty detector is a load failure — report it at error
            // severity, consistent with the cascade branch above.
            Debug.LogError("tf detector is empty");
        }
    }
}
// Use this for initialization
// Loads the Inception model and its class-label list, then starts the webcam
// helper. Load-failure checks are compiled out on UWP (WSA 10.0) builds.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    // Load the TensorFlow graph from its resolved StreamingAssets path.
    net = Dnn.readNetFromTensorflow(tensorflow_inception_graph_pb_filepath);
#if !UNITY_WSA_10_0
    if (net.empty())
    {
        Debug.LogError("model file is not loaded.The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

    // Load the human-readable class labels that pair with the model outputs.
    classes = readClassNames(imagenet_comp_graph_label_strings_txt_filepath);
#if !UNITY_WSA_10_0
    if (classes == null)
    {
        Debug.LogError("class names list file is not loaded.The model and class names list can be downloaded here: \"https://storage.googleapis.com/download.tensorflow.org/models/inception5h.zip\".Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }
#endif

    // Start the webcam capture helper.
    webCamTextureToMatHelper.Initialize();
}
// Runs single-image OpenPose inference: reads the image at jpg_path, locates
// body-part keypoints via per-part heat maps, draws the skeleton, and shows
// the annotated result on this quad's material.
void Run(string jpg_path)
{
    Utils.setDebugMode(true);

    Mat img = Imgcodecs.imread(jpg_path);

    // Scale the quad to the image size and fit the orthographic camera so the
    // image fills the screen while preserving its aspect ratio.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    Net net = null;
    if (!string.IsNullOrEmpty(graph_filepath))
    {
        net = Dnn.readNetFromTensorflow(graph_filepath);
    }
    if (net == null)
    {
        // Surface the load failure directly on the displayed image.
        Imgproc.putText(img, "Model file is not loaded.", new Point(5, img.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }
    else
    {
        float frameWidth = img.cols();
        float frameHeight = img.rows();

        Mat input = Dnn.blobFromImage(img, 1.0, new Size(inWidth, inHeight), new Scalar(0, 0, 0), false, false);
        net.setInput(input, "image");
        Mat output = net.forward("Openpose/concat_stage7");
        // 57 rows: one 46x46 heat map per output channel.
        output = output.reshape(1, 57);

        // For each body part, take the highest-confidence heat-map location.
        List <Point> points = new List <Point>();
        for (int i = 0; i < BODY_PARTS.Count; i++)
        {
            Mat heatMap = output.row(i).reshape(1, 46);
            Core.MinMaxLocResult result = Core.minMaxLoc(heatMap);
            heatMap.Dispose();
            // Map 46x46 heat-map coordinates back to image coordinates.
            double x = (frameWidth * result.maxLoc.x) / 46;
            double y = (frameHeight * result.maxLoc.y) / 46;
            if (result.maxVal > 0.3)
            {
                points.Add(new Point(x, y));
            }
            else
            {
                // Keep index alignment with BODY_PARTS via a null placeholder.
                points.Add(null);
            }
        }

        // Draw each limb whose two endpoints were both detected.
        for (int i = 0; i < POSE_PAIRS.GetLength(0); i++)
        {
            string partFrom = POSE_PAIRS[i, 0];
            string partTo = POSE_PAIRS[i, 1];
            int idFrom = BODY_PARTS[partFrom];
            int idTo = BODY_PARTS[partTo];
            if (points[idFrom] != null && points[idTo] != null)
            {
                Imgproc.line(img, points[idFrom], points[idTo], new Scalar(0, 255, 0), 3);
                Imgproc.ellipse(img, points[idFrom], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
                Imgproc.ellipse(img, points[idTo], new Size(3, 3), 0, 0, 360, new Scalar(0, 0, 255), Core.FILLED);
            }
        }

        // Fix: release the native buffers and the network — these were
        // previously leaked (compare the sibling samples, which dispose them).
        output.Dispose();
        input.Dispose();
        net.Dispose();
    }

    // Convert BGR -> RGB and hand the result to Unity as a texture.
    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img, texture);
    gameObject.GetComponent <Renderer>().material.mainTexture = texture;

    Utils.setDebugMode(false);
}
// Use this for initialization
// Runs single-image Mask R-CNN inference: loads class names, the image and the
// TensorFlow model; runs the network; tints each detected instance with a
// per-class color mask; draws boxes and labels; shows the result on this quad.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    classNames = readClassNames(classes_filepath);
    if (classNames == null)
    {
        Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }

    // One random color per class, used to tint the instance masks.
    classColors = new List <Scalar> ();
    for (int i = 0; i < classNames.Count; i++)
    {
        classColors.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
    }

    Mat img = Imgcodecs.imread(image_filepath);
    if (img.empty())
    {
        // Fall back to a black canvas so the rest of the pipeline still runs.
        Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        img = new Mat(height, width, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }

    //Adust Quad.transform.localScale.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the orthographic camera to the image, preserving aspect ratio.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    Net net = null;
    if (string.IsNullOrEmpty(model_filepath) || string.IsNullOrEmpty(config_filepath))
    {
        Debug.LogError(model_filepath + " or " + config_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }
    else
    {
        net = Dnn.readNetFromTensorflow(model_filepath, config_filepath);
    }

    if (net == null)
    {
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }
    else
    {
        float frameWidth = img.cols();
        float frameHeight = img.rows();

        Mat blob = Dnn.blobFromImage(img, 1.0, new Size(width, height), new Scalar(0, 0, 0), true, false);
        net.setInput(blob);

        List <Mat> outputBlobs = new List <Mat> ();
        List <string> outputName = new List <string> ();
        outputName.Add("detection_out_final");
        outputName.Add("detection_masks");
        net.forward(outputBlobs, outputName);
        Mat boxes = outputBlobs [0];
        Mat masks = outputBlobs [1];

        // Capture the 4D layout BEFORE reshaping. Fix: these were previously
        // hard-coded further down as 90 classes and a 15x15 mask.
        int numClasses = masks.size(1);
        int maskSizeH = masks.size(2);
        int maskSizeW = masks.size(3);

        Debug.Log("boxes.size(0) " + boxes.size(0));
        Debug.Log("boxes.size(1) " + boxes.size(1));
        Debug.Log("boxes.size(2) " + boxes.size(2));
        Debug.Log("boxes.size(3) " + boxes.size(3));
        Debug.Log("masks.size(0) " + masks.size(0));
        Debug.Log("masks.size(1) " + masks.size(1));
        Debug.Log("masks.size(2) " + masks.size(2));
        Debug.Log("masks.size(3) " + masks.size(3));

        //reshape from 4D to two 2D.
        float[] data = new float[boxes.size(3)];
        boxes = boxes.reshape(1, (int)boxes.total() / boxes.size(3));

        //reshape from 4D to two 2D.
        float[] mask_data = new float[maskSizeH * maskSizeW];
        masks = masks.reshape(1, (int)masks.total() / (maskSizeH * maskSizeW));

        for (int i = 0; i < boxes.rows(); i++)
        {
            boxes.get(i, 0, data);
            float score = data [2];
            if (score > thr)
            {
                int class_id = (int)(data [1]);

                // Box coordinates are normalized [0,1]; scale to pixels, clamp.
                float left = (float)(data [3] * frameWidth);
                float top = (float)(data [4] * frameHeight);
                float right = (float)(data [5] * frameWidth);
                float bottom = (float)(data [6] * frameHeight);
                left = (int)Mathf.Max(0, Mathf.Min(left, frameWidth - 1));
                top = (int)Mathf.Max(0, Mathf.Min(top, frameHeight - 1));
                right = (int)Mathf.Max(0, Mathf.Min(right, frameWidth - 1));
                bottom = (int)Mathf.Max(0, Mathf.Min(bottom, frameHeight - 1));
                Debug.Log("class_id: " + class_id + " class_name " + classNames [class_id] + " left: " + left + " top: " + top + " right: " + right + " bottom: " + bottom);

                //draw masks
                // Reshaped masks hold numClasses rows per detection; pick the
                // row for this detection's class.
                masks.get((i * numClasses) + class_id, 0, mask_data);
                Mat objectMask = new Mat(maskSizeH, maskSizeW, CvType.CV_32F);
                Utils.copyToMat <float> (mask_data, objectMask);
                // Resize the low-res mask to the box and binarize at mask_thr.
                Imgproc.resize(objectMask, objectMask, new Size(right - left + 1, bottom - top + 1));
                Core.compare(objectMask, new Scalar(mask_thr), objectMask, Core.CMP_GT);

                // Blend the class color over the masked pixels of the box ROI.
                Mat roi = new Mat(img, new OpenCVForUnity.CoreModule.Rect(new Point(left, top), new Point(right + 1, bottom + 1)));
                Mat coloredRoi = new Mat(roi.size(), CvType.CV_8UC3);
                Imgproc.rectangle(coloredRoi, new Point(0, 0), new Point(coloredRoi.width(), coloredRoi.height()), classColors [class_id], -1);
                Core.addWeighted(coloredRoi, 0.7, roi, 0.3, 0, coloredRoi);
                coloredRoi.copyTo(roi, objectMask);
                coloredRoi.Dispose();
                objectMask.Dispose();

                //draw boxes
                Imgproc.rectangle(img, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0), 2);
                string label = score.ToString();
                if (classNames != null && classNames.Count != 0)
                {
                    if (class_id < (int)classNames.Count)
                    {
                        label = classNames [class_id] + ": " + label;
                    }
                }
                // Draw the label on a filled white background above the box.
                int[] baseLine = new int[1];
                Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);
                top = Mathf.Max(top, (int)labelSize.height);
                Imgproc.rectangle(img, new Point(left, top - labelSize.height), new Point(left + labelSize.width, top + baseLine [0]), Scalar.all(255), Core.FILLED);
                Imgproc.putText(img, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0, 255));
            }
        }
        boxes.Dispose();
        masks.Dispose();
        blob.Dispose();
        // Fix: dispose the network only on this branch. The previous
        // unconditional net.Dispose() after the if/else threw a
        // NullReferenceException whenever the model files were missing.
        net.Dispose();
    }

    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img, texture);
    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

    Utils.setDebugMode(false);
}
// Loads the TensorFlow model used for inference from its StreamingAssets path.
private void LoadModel()
{
    string model_filepath = Utils.getFilePath(MODEL_FILE_PATH);
    this.net = Dnn.readNetFromTensorflow(model_filepath);
    // Fix: report a load failure here instead of failing later at inference
    // time (consistent with the other model loaders in this project).
    if (this.net.empty())
    {
        Debug.LogError("model file is not loaded.");
    }
}
// Use this for initialization
// Runs single-image Mask R-CNN inference with timing: loads class names, the
// image and the TensorFlow model; tints each detected instance with its class
// color; delegates box/label drawing to drawPred; shows the result on this quad.
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    classNames = readClassNames(classes_filepath);
    if (classNames == null)
    {
        Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }

    // One random color per class, used to tint the instance masks.
    classColors = new List <Scalar>();
    for (int i = 0; i < classNames.Count; i++)
    {
        classColors.Add(new Scalar(UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255), UnityEngine.Random.Range(0, 255)));
    }

    Mat img = Imgcodecs.imread(image_filepath);
    if (img.empty())
    {
        // Fall back to a black canvas so the rest of the pipeline still runs.
        Debug.LogError(image_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        img = new Mat(height, width, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }

    //Adust Quad.transform.localScale.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the orthographic camera to the image, preserving aspect ratio.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    Net net = null;
    if (string.IsNullOrEmpty(model_filepath) || string.IsNullOrEmpty(config_filepath))
    {
        Debug.LogError(model_filepath + " or " + config_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }
    else
    {
        net = Dnn.readNetFromTensorflow(model_filepath, config_filepath);
    }

    if (net == null)
    {
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }
    else
    {
        float frameW = img.cols();
        float frameH = img.rows();

        // Create a 4D blob from a frame.
        Mat blob = Dnn.blobFromImage(img, 1.0, new Size(width, height), new Scalar(0, 0, 0), true, false);

        //Run a model
        net.setInput(blob);
        List <Mat> outputBlobs = new List <Mat>();
        List <string> outputName = new List <string>();
        outputName.Add("detection_out_final");
        outputName.Add("detection_masks");
        TickMeter tm = new TickMeter();
        tm.start();
        net.forward(outputBlobs, outputName);
        tm.stop();
        Debug.Log("Inference time, ms: " + tm.getTimeMilli());

        Mat boxes = outputBlobs[0];
        Mat masks = outputBlobs[1];
        //int numClasses = masks.size(1);
        int numDetections = boxes.size(2);
        int mask_sizeH = masks.size(2);
        int mask_sizeW = masks.size(3);
        float[] box_data = new float[boxes.size(3)];
        float[] mask_data = new float[masks.size(2) * masks.size(3)];

        for (int i = 0; i < numDetections; i++)
        {
            boxes.get(new int[] { 0, 0, i, 0 }, box_data);
            float score = box_data[2];
            if (score > thr)
            {
                int classId = (int)box_data[1];

                // Box coordinates are normalized [0,1]; scale to pixels, clamp.
                float left = (int)frameW * box_data[3];
                float top = (int)frameH * box_data[4];
                float right = (int)frameW * box_data[5];
                float bottom = (int)frameH * box_data[6];
                left = (int)Mathf.Max(0, Mathf.Min(left, frameW - 1));
                top = (int)Mathf.Max(0, Mathf.Min(top, frameH - 1));
                right = (int)Mathf.Max(0, Mathf.Min(right, frameW - 1));
                bottom = (int)Mathf.Max(0, Mathf.Min(bottom, frameH - 1));

                // Fetch this detection's per-class mask, resize it to the box,
                // and binarize at 0.5.
                masks.get(new int[] { i, classId, 0, 0 }, mask_data);
                Mat classMask = new Mat(mask_sizeH, mask_sizeW, CvType.CV_32F);
                classMask.put(0, 0, mask_data);
                Imgproc.resize(classMask, classMask, new Size(right - left + 1, bottom - top + 1));
                Core.compare(classMask, new Scalar(0.5), classMask, Core.CMP_GT);

                // Blend the class color over the masked pixels of the box ROI.
                Mat roi = new Mat(img, new OpenCVForUnity.CoreModule.Rect(new Point(left, top), new Point(right + 1, bottom + 1)));
                Mat coloredRoi = new Mat(roi.size(), CvType.CV_8UC3);
                Imgproc.rectangle(coloredRoi, new Point(0, 0), new Point(coloredRoi.width(), coloredRoi.height()), classColors[classId], -1);
                Core.addWeighted(coloredRoi, 0.7, roi, 0.3, 0, coloredRoi);
                coloredRoi.copyTo(roi, classMask);
                coloredRoi.Dispose();
                classMask.Dispose();

                drawPred(classId, score, left, top, right, bottom, img);
                Debug.Log("classId:" + classId + " cnof:" + score + " l:" + left + " t:" + top + " r:" + right + " b:" + bottom);
            }
        }
        boxes.Dispose();
        masks.Dispose();
        blob.Dispose();
        // Fix: dispose the network only on this branch. The previous
        // unconditional net.Dispose() after the if/else threw a
        // NullReferenceException whenever the model files were missing.
        net.Dispose();
    }

    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img, texture);
    gameObject.GetComponent <Renderer>().material.mainTexture = texture;

    Utils.setDebugMode(false);
}
// Runs single-image object detection: loads class names, the image and the
// TensorFlow model; runs inference over all output layers; delegates drawing
// to postprocess; and shows the annotated result on this quad's material.
public void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    classNames = readClassNames(classes_filepath);
    if (classNames == null)
    {
        Debug.LogError(classes_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }

    //Mat img = new Mat(imageTex.height, imageTex.width, CvType.);
    //Utils.texture2DToMat(imageTex, img);
    Mat img = Imgcodecs.imread(input_filepath);
    if (img.empty())
    {
        // Fall back to a black canvas so the rest of the pipeline still runs.
        Debug.LogError(input_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }

    //Adust Quad.transform.localScale.
    gameObject.transform.localScale = new Vector3(img.width(), img.height(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    // Fit the orthographic camera to the image, preserving aspect ratio.
    float imageWidth = img.width();
    float imageHeight = img.height();
    float widthScale = (float)Screen.width / imageWidth;
    float heightScale = (float)Screen.height / imageHeight;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (imageWidth * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = imageHeight / 2;
    }

    Net net = null;
    if (string.IsNullOrEmpty(model_filepath) || string.IsNullOrEmpty(config_filepath))
    {
        Debug.LogError(model_filepath + " or " + config_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
    }
    else
    {
        net = Dnn.readNetFromTensorflow(model_filepath, config_filepath);
    }

    if (net == null)
    {
        Imgproc.putText(img, "model file is not loaded.", new Point(5, img.rows() - 30), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
        Imgproc.putText(img, "Please read console message.", new Point(5, img.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
    }
    else
    {
        outBlobNames = getOutputsNames(net);
        outBlobTypes = getOutputsTypes(net);

        // Preprocess: scale 0.007843 (~1/127.5), 300x300 input, mean 127.5.
        Mat blob = Dnn.blobFromImage(img, 0.007843, new Size(300, 300), new Scalar(127.5, 127.5, 127.5));
        net.setInput(blob);

        TickMeter tm = new TickMeter();
        tm.start();
        List <Mat> outs = new List <Mat>();
        net.forward(outs, outBlobNames);
        tm.stop();
        Debug.Log("Inference time, ms: " + tm.getTimeMilli());

        postprocess(img, outs, net);

        for (int i = 0; i < outs.Count; i++)
        {
            outs[i].Dispose();
        }
        blob.Dispose();
        net.Dispose();
    }

    // Fix: display and debug-mode reset moved OUT of the else branch. They
    // previously only ran on a successful model load, so the on-image error
    // message was never shown and native debug mode was never switched off.
    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
    Texture2D texture = new Texture2D(img.cols(), img.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img, texture);
    gameObject.GetComponent <Renderer>().material.mainTexture = texture;

    Utils.setDebugMode(false);
}