// Use this for initialization
void Run()
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    // Load the Torch style-transfer model from disk.
    net = Dnn.readNetFromTorch(model_filepath);

    // An empty net means the model file was missing or unreadable; tell the user where to get it.
    if (net.empty())
    {
        Debug.LogError("model file is not loaded. The model and class names list can be downloaded here: \"https://cs.stanford.edu/people/jcjohns/fast-neural-style/models/instance_norm/mosaic.t7\". Please copy to “Assets/StreamingAssets/dnn/” folder. ");
    }

#if UNITY_ANDROID && !UNITY_EDITOR
    // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
    webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif

    // Start the webcam-to-Mat pipeline regardless; frames simply won't be stylized if the net failed to load.
    webCamTextureToMatHelper.Initialize();
}
/// <summary>
/// Runs the Torch embedding network on the face region <paramref name="ROI"/> of
/// <paramref name="imageTex"/> and wraps the 128-float network output in a
/// <see cref="FaceEmbeddings"/>. When <c>displayBB</c> is set and this GameObject has a
/// Renderer, the cropped face is also shown on that Renderer's main texture.
/// </summary>
/// <param name="imageTex">Source image; its pixels are copied into an 8UC3 Mat.</param>
/// <param name="ROI">Face bounding box in pixel coordinates of <paramref name="imageTex"/>.</param>
/// <returns>The embedding vector read from the network's forward pass.</returns>
public FaceEmbeddings ExtractFaceEmbeddings(Texture2D imageTex, UnityEngine.Rect ROI)
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    var embedder = Dnn.readNetFromTorch(model_filepath);

    Mat img = new Mat(imageTex.height, imageTex.width, CvType.CV_8UC3);
    // FIX: the original called cvtColor(img, img, COLOR_BGR2RGB) here, on an
    // uninitialized Mat whose contents texture2DToMat then completely overwrote.
    // That call only shuffled garbage pixels and has been removed.
    Utils.texture2DToMat(imageTex, img);
    if (img.empty())
    {
        Debug.LogError(input_filepath + " is not loaded. Please see \"StreamingAssets/dnn/setup_dnn_module.pdf\". ");
        img = new Mat(424, 640, CvType.CV_8UC3, new Scalar(0, 0, 0));
    }

    // submat takes (rowStart, rowEnd, colStart, colEnd): y-range first, then x-range.
    Mat cropped_face = img.submat((int)ROI.y, (int)ROI.y + (int)ROI.height, (int)ROI.x, (int)ROI.width + (int)ROI.x);
    Imgproc.cvtColor(cropped_face, cropped_face, Imgproc.COLOR_BGR2RGB);

    // swapRB=true, crop=false; mean subtraction is a no-op (all-zero Scalar).
    var faceBlob = Dnn.blobFromImage(cropped_face, scalefactor, new Size(inpWidth, inpHeight), new Scalar(0, 0, 0), true, false);
    embedder.setInput(faceBlob);

    var netOut = embedder.forward();
    var embeddings = new FaceEmbeddings(netOut, 128);

    if (gameObject.GetComponent<Renderer>() != null && displayBB)
    {
        GenericUtils.AdjustImageScale(cropped_face, this.gameObject);
        Texture2D texture = new Texture2D(cropped_face.cols(), cropped_face.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(cropped_face, texture);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }

    // Release native resources. FIX: faceBlob was leaked in the original
    // (every other Mat and the Net were already disposed here).
    faceBlob.Dispose();
    embedder.Dispose();
    cropped_face.Dispose();
    img.Dispose();
    netOut.Dispose();

    return embeddings;
}
/// <summary>
/// Loads the image at "faces/&lt;path&gt;", crops it to the bounding box returned by
/// <c>GetBB</c>, and returns the raw forward-pass output of the Torch embedder.
/// The caller owns the returned Mat and is responsible for disposing it.
/// </summary>
/// <param name="path">File name relative to the "faces/" streaming-assets folder.</param>
/// <returns>The network output Mat, or the empty source Mat if the image failed to load.</returns>
public Mat Extract(string path)
{
    //if true, The error log of the Native side OpenCV will be displayed on the Unity Editor Console.
    Utils.setDebugMode(true);

    var embedder = Dnn.readNetFromTorch(model_filepath);

    Mat img = Imgcodecs.imread(Utils.getFilePath("faces/" + path));
    if (img.empty())
    {
        Debug.LogError("image is not loaded");
        // FIX: release the net on the early-return path too (it leaked before).
        embedder.Dispose();
        return img;
    }

    Imgproc.cvtColor(img, img, Imgproc.COLOR_BGR2RGB);
    var roi = GetBB(img);
    // submat takes (rowStart, rowEnd, colStart, colEnd): y-range first, then x-range.
    Mat cropped_face = img.submat((int)roi.y, (int)roi.y + (int)roi.height, (int)roi.x, (int)roi.width + (int)roi.x);

    var faceBlob = Dnn.blobFromImage(cropped_face, scalefactor, new Size(inpWidth, inpHeight), new Scalar(0, 0, 0), true, false);
    embedder.setInput(faceBlob);
    var netOut = embedder.forward();

    if (gameObject.GetComponent<Renderer>() != null && displayBB)
    {
        GenericUtils.AdjustImageScale(cropped_face, this.gameObject);
        Texture2D texture = new Texture2D(cropped_face.cols(), cropped_face.rows(), TextureFormat.RGBA32, false);
        Utils.matToTexture2D(cropped_face, texture);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }

    // FIX: the embedder and cropped_face disposes were commented out and faceBlob was
    // never disposed, leaking native memory on every call. All uses are complete at this
    // point; only netOut must stay alive for the caller.
    faceBlob.Dispose();
    cropped_face.Dispose();
    img.Dispose();
    embedder.Dispose();

    return netOut;
}