/// <summary>
/// Default initializer for MonoBehavior sub-classes.
/// Forces the frontal camera, then builds and configures the live face
/// processor (cascades + landmark shape data, stabilizer, performance knobs).
/// </summary>
protected override void Awake()
{
    base.Awake();

    // We work with frontal cams here; force it, as macOS/MacBook doesn't report the frontal cam correctly.
    base.forceFrontalCamera = true;

    // NOTE: shapes.bytes must hold the DLib pre-trained shape predictor
    // ("shape_predictor_68_face_landmarks", free from http://dlib.net/files/);
    // with only the placeholder asset the demo detects face rects but no landmarks.
    processor = new FaceProcessorLive<WebCamTexture>();
    processor.Initialize(faces.text, eyes.text, shapes.bytes);

    // Data stabilizer - affects face rects, face landmarks etc.
    processor.DataStabilizer.Enabled = true;       // enable stabilizer
    processor.DataStabilizer.Threshold = 2.0;      // threshold value in pixels
    processor.DataStabilizer.SamplesCount = 2;     // how many samples do we need to compute stable data

    // Performance data - some tricks to make it work faster.
    processor.Performance.Downscale = 256;         // processed image is pre-scaled down to N px by long side
    processor.Performance.SkipRate = 0;            // we actually process only each Nth frame (and every frame for skipRate = 0)
}
/// <summary>
/// Default initializer for MonoBehavior sub-classes.
/// Forces the frontal camera, then builds and configures the live face
/// processor (cascades + landmark shape data, stabilizer, performance knobs).
/// </summary>
protected override void Awake()
{
    base.Awake();

    // We work with frontal cams here; force it, as macOS/MacBook doesn't report the frontal cam correctly.
    base.forceFrontalCamera = true;

    // (Removed unused local "byte[] shapeDat = shapes.bytes" — it was never read;
    // shapes.bytes is passed to Initialize directly below.)
    processor = new FaceProcessorLive<WebCamTexture>();
    processor.Initialize(faces.text, eyes.text, shapes.bytes);

    // Data stabilizer - affects face rects, face landmarks etc.
    processor.DataStabilizer.Enabled = true;       // enable stabilizer
    processor.DataStabilizer.Threshold = 2.0;      // threshold value in pixels
    processor.DataStabilizer.SamplesCount = 2;     // how many samples do we need to compute stable data

    // Performance data - some tricks to make it work faster.
    processor.Performance.Downscale = 256;         // processed image is pre-scaled down to N px by long side
    processor.Performance.SkipRate = 0;            // we actually process only each Nth frame (and every frame for skipRate = 0)
}
/// <summary>
/// Default initializer for MonoBehavior sub-classes.
/// Forces the frontal camera, loads the landmark shape data from disk and
/// configures the live face processor (stabilizer + performance knobs).
/// </summary>
protected override void Awake()
{
    base.Awake();

    // We work with frontal cams here; force it, as macOS/MacBook doesn't report the frontal cam correctly.
    forceFrontalCamera = true;

    // Load the DLib shape predictor data from the project's Data folder.
    // Path.Combine handles separators portably instead of hand-built "/" concatenation.
    string shapesFilePath = Path.Combine(Application.dataPath, "Data", shapesFileName);
    byte[] shapesBytes = File.ReadAllBytes(shapesFilePath);

    processor = new FaceProcessorLive<WebCamTexture>();
    processor.Initialize(faces.text, eyes.text, shapesBytes);

    // Data stabilizer - affects face rects, face landmarks etc.
    processor.DataStabilizer.Enabled = true;       // enable stabilizer
    processor.DataStabilizer.Threshold = 2.0;      // threshold value in pixels
    processor.DataStabilizer.SamplesCount = 2;     // how many samples do we need to compute stable data

    // Performance data - some tricks to make it work faster.
    processor.Performance.Downscale = 256;         // processed image is pre-scaled down to N px by long side
    processor.Performance.SkipRate = 0;            // we actually process only each Nth frame (and every frame for skipRate = 0)
}
/// <summary>
/// Marks detected faces (rect, Delaunay triangulation and, optionally,
/// feature sub-items) on the test texture.
/// </summary>
/// <param name="processer">Live face processor holding the detection results.</param>
/// <param name="drawSubItems">Whether to render face sub-elements (nose, eyes, lips, etc.) as well.</param>
/// <returns>A new Texture2D with the detection overlay rendered onto it.</returns>
private Texture2D DrawCustomFaces(OpenCvSharp.Demo.FaceProcessorLive<WebCamTexture> processer, bool drawSubItems = true)
{
    // Mat wraps native OpenCV memory (IDisposable) — the original leaked it on
    // every call; dispose once the output texture has been created from it.
    // MatToTexture copies pixel data, so disposing afterwards is safe.
    using (Mat image = OpenCvSharp.Unity.TextureToMat(testRenderedTexture, TextureParameters))
    {
        // Sub-items whose landmark polylines should be drawn closed.
        // Hoisted out of the per-face loop (was rebuilt for every face) and
        // switched to a HashSet for O(1) membership; same ordinal matching.
        var closedItems = new HashSet<string> { "Nose", "Eye", "Lip" };

        foreach (OpenCvSharp.Demo.DetectedFace face in processer.Faces)
        {
            // Face bounding rect.
            Cv2.Rectangle((InputOutputArray)image, face.Region, Scalar.FromRgb(255, 0, 0), 2);

            // Render face triangulation (should we have one).
            if (face.Info != null)
            {
                foreach (OpenCvSharp.Demo.DetectedFace.Triangle tr in face.Info.DelaunayTriangles)
                {
                    Cv2.Polylines(image, new IEnumerable<Point>[] { tr.ToArray() }, true, Scalar.FromRgb(0, 0, 255), 1);
                }
            }

            // Face sub-elements (landmark groups per feature).
            if (drawSubItems)
            {
                foreach (OpenCvSharp.Demo.DetectedObject sub in face.Elements)
                {
                    if (sub.Marks != null)
                    {
                        Cv2.Polylines(image, new IEnumerable<Point>[] { sub.Marks }, closedItems.Contains(sub.Name), Scalar.FromRgb(0, 255, 0), 1);
                    }
                }
            }
        }

        return OpenCvSharp.Unity.MatToTexture(image);
    }
}