public static Blur ( InputArray src, OutputArray dst, Size ksize, System.Point? anchor = null, BorderTypes borderType = BorderTypes.Default ) : void |
src | InputArray | The source image |
dst | OutputArray | The destination image; will have the same size and the same type as src |
ksize | Size | The smoothing kernel size |
anchor | System.Point? | The anchor point. The default value null is interpreted as Point(-1,-1), meaning the anchor is at the kernel center |
borderType | BorderTypes | The border mode used to extrapolate pixels outside of the image |
return | void |
/// <summary>
/// Estimates the pupil position within an eye region.
/// </summary>
/// <param name="eye">Grayscale eye patch; modified in place by the filtering steps</param>
/// <returns>Location of the minimum Laplacian response, taken as the pupil center</returns>
private Point getEyePosition(Mat eye)
{
    // Smooth first so the Laplacian responds to real structure, not pixel noise (in-place).
    Cv2.Blur(eye, eye, new Size(5, 5));

    // Edge response; CV_64F preserves negative values of the Laplacian (in-place).
    Cv2.Laplacian(eye, eye, MatType.CV_64F);

    // Third out argument is the min-response location.
    // NOTE(review): minimum (not maximum) response is used as the pupil cue — confirm intended.
    Cv2.MinMaxLoc(eye, out _, out _, out Point position, out _);

    return position;
}
/// <summary>
/// Detector: finds faces (and optionally 68-point facial landmarks) on the
/// supplied texture and returns the landmark set of the largest confirmed face.
/// </summary>
/// <param name="texture">Input Unity texture</param>
/// <param name="texParams">Texture parameters (flipped, rotated etc.)</param>
/// <returns>
/// Landmarks of the largest face found this frame, or the previously cached
/// face (<c>lastFace</c>) when no larger face is detected.
/// </returns>
public virtual Point[] ProcessTexture(T texture, Unity.TextureConversionParams texParams)
{
    // convert Unity texture to OpenCv::Mat
    ImportTexture(texture, texParams);

    DataStabilizer.ThresholdFactor = 1;

    // convert to grayscale and normalize; the blur softens shadows before detection
    Mat gray = new Mat();
    Cv2.CvtColor(processingImage, gray, ColorConversionCodes.BGR2GRAY);
    Cv2.Blur(gray, gray, new Size(10, 10));

    // fix shadows
    // Cv2.EqualizeHist(gray, gray);

    // detect matching regions (faces bounding)
    Rect[] rawFaces = cascadeFaces.DetectMultiScale(gray, 1.2, 6);
    if (Faces.Count != rawFaces.Length)
    {
        Faces.Clear();
    }

    // now per each detected face: confirm with the eye detector, fit landmarks,
    // and keep the largest face seen this frame
    Point[] maxFace = lastFace;
    double maxFaceSize = 0;
    for (int i = 0; i < rawFaces.Length; ++i)
    {
        Rect faceRect = rawFaces[i];
        using (Mat grayFace = new Mat(gray, faceRect))
        {
            // another trick: confirm the face with eye detector, will cut some false positives
            if (cutFalsePositivesWithEyesSearch && null != cascadeEyes)
            {
                Rect[] eyes = cascadeEyes.DetectMultiScale(grayFace);
                if (eyes.Length == 0 || eyes.Length > 2)
                {
                    continue;
                }
            }

            // get (or create) the tracked face object for this index
            // NOTE(review): when the eye check above 'continue's an index, Faces[i]
            // can drift out of sync with rawFaces[i] on later iterations — confirm intended.
            Demo.DetectedFace face = null;
            if (Faces.Count < i + 1)
            {
                face = new Demo.DetectedFace(DataStabilizer, faceRect);
                Faces.Add(face);
            }
            else
            {
                face = Faces[i];
                face.SetRegion(faceRect);
            }

            // shape: fit landmarks and track the largest 68-point face
            if (null != shapeFaces)
            {
                Point[] marks = shapeFaces.DetectLandmarks(gray, faceRect);

                // we have 68-point predictor
                if (marks.Length == 68)
                {
                    // squared distance between the outer eye corners (points 0 and 16)
                    // serves as a cheap face-size metric
                    double size = Point.DistancePow2(marks[0], marks[16]);
                    if (size > maxFaceSize)
                    {
                        maxFaceSize = size;
                        maxFace = marks;
                    }
                }
            }
        }
    }

    // cache the best face so frames without a detection can reuse it
    if (maxFaceSize != 0)
    {
        lastFace = maxFace;
    }
    return maxFace;
}