/// <summary>
/// Detects faces in a Unity texture.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="image">Source texture.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(Texture2D image, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    // Bridge the texture into Core Image (downscaled by preprocessImageScale)
    // and delegate to the CIImage overload.
    return DetectInImage(new CIImage(CGImage.FromTexture2D(image, preprocessImageScale)), imageOrientation);
}
/// <summary>
/// Detects faces in raw 32-bit pixel data.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="data">Pixel data.</param>
/// <param name="width">Width of image.</param>
/// <param name="height">Height of image.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInPixels32(Color32[] data, int width, int height, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    // Build a Core Image image from the pixels (downscaled by
    // preprocessImageScale) and hand off to the CIImage overload.
    var image = new CIImage(CGImage.FromPixels32(data, width, height, preprocessImageScale));
    return DetectInImage(image, imageOrientation);
}
/// <summary>
/// Per-frame hook: runs face detection on the live camera feed every
/// detectEveryXFrames frames and logs the results.
/// </summary>
void Update() {
    // Detection requires an actual iOS device.
    if (!CoreXT.IsDevice)
        return;
    if (!_isDetecting || !_cameraVideo.webCamTexture.didUpdateThisFrame)
        return;

    // Throttle: only run the (expensive) detector every Nth updated frame.
    _frameCount++;
    if (_frameCount % detectEveryXFrames != 0)
        return;

    var tex = _cameraVideo.webCamTexture;
    CGImageOrientation orientation = _cameraVideo.cameraOrientationForFaceDetector;
    _faceDetector.isMirrored = _cameraVideo.isMirrored;
    _faceDetector.projectedScale = _cameraVideo.videoToCameraScale;

    // detect
    _faces = _faceDetector.DetectInPixels32(tex.GetPixels32(), tex.width, tex.height, orientation);
    foreach (var face in _faces) {
        Log("face: " + face.bounds + ", " + face.hasMouthPosition + ", " + face.leftEyePosition + ", " + face.rightEyePosition);
    }
}
/// <summary>
/// Detects faces in a Unity texture (no preprocessing scale applied).
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="image">Source texture.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(Texture2D image, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    // NOTE(review): the intermediate CGImage is never explicitly released
    // here (a Release() call was commented out) — confirm the binding layer
    // manages its lifetime.
    var ciimage = new CIImage(CGImage.FromTexture2D(image));
    return DetectInImage(ciimage, imageOrientation);
}
/// <summary>
/// Transforms a feature point from Core Image coordinates (bottom-left
/// origin, unrotated) into top-left-origin, upright coordinates,
/// compensating for rotation and mirroring, then applies the projection
/// scale.
/// </summary>
/// <returns>The corrected point.</returns>
/// <param name="point">Point as reported by the detector.</param>
/// <param name="imageHeight">Height of the detected image in pixels.</param>
/// <param name="imageWidth">Width of the detected image in pixels.</param>
/// <param name="imageOrientation">Orientation the image was captured in.</param>
private Vector2 _FixPoint(Vector2 point, int imageHeight, int imageWidth, CGImageOrientation imageOrientation) {
    // Core Image uses a bottom-left origin; flip to top-left.
    point.y = imageHeight - point.y;
    // rotate coords
    if (correctOrientation) {
        float temp;
        switch (imageOrientation) {
        case CGImageOrientation.UpsideDown:
            point.x = imageWidth - point.x;
            point.y = imageHeight - point.y;
            if (isMirrored) {
                // BUGFIX: x spans [0, imageWidth] here, so mirror around the
                // width (previously mirrored around imageHeight).
                point.x = imageWidth - point.x;
            }
            break;
        case CGImageOrientation.RotatedLeft:
            temp = point.x;
            point.x = imageHeight - point.y;
            point.y = temp;
            if (isMirrored) {
                // BUGFIX: after the 90° axis swap x spans [0, imageHeight],
                // so mirror around the height (previously imageWidth).
                point.x = imageHeight - point.x;
            }
            break;
        case CGImageOrientation.RotatedRight:
            temp = point.y;
            point.y = imageWidth - point.x;
            point.x = temp;
            if (isMirrored) {
                // BUGFIX: after the 90° axis swap x spans [0, imageHeight],
                // so mirror around the height (previously imageWidth).
                point.x = imageHeight - point.x;
            }
            break;
        default:
            if (isMirrored) {
                // BUGFIX: x spans [0, imageWidth] in the default orientation
                // (previously mirrored around imageHeight).
                point.x = imageWidth - point.x;
            }
            break;
        }
    }
    // scale into projected (screen) coordinates
    if (_finalScale > 1.0) {
        point.x *= _finalScale;
        point.y *= _finalScale;
    }
    return(point);
}
/// <summary>
/// Per-frame hook: detects a single face in the webcam feed and rotates
/// the arm toward the side of the screen the face is on.
/// </summary>
void Update() {
    if (!CoreXT.IsDevice || !webCam.didUpdateThisFrame)
        return;

    // Map the WebCamTexture rotation angle onto a CGImage orientation.
    CGImageOrientation orientation;
    switch (webCam.videoRotationAngle) {
    case 0:
        orientation = CGImageOrientation.Default;
        break;
    case 180:
        orientation = CGImageOrientation.UpsideDown;
        break;
    case 270:
        orientation = CGImageOrientation.RotatedRight;
        break;
    case 90:
    default:
        // 90° was also the original fallback for unexpected angles.
        orientation = CGImageOrientation.RotatedLeft;
        break;
    }

    var ciimage = new CIImage(CGImage.FromWebCamTexture(webCam));
    // BUGFIX: Screen.width / webCam.width was integer division, which
    // truncates the scale (e.g. 1136/640 -> 1); cast to float first.
    faceDetector.ProjectedScale = (float)Screen.width / webCam.width;
    faces = faceDetector.DetectInImage(ciimage, orientation);

    // Only react when exactly one face is in view.
    if (faces.Length == 1) {
        var face = faces[0];
        // Hoisted the duplicated Find+GetComponent lookup; consider caching
        // it in Start() — Find every frame is costly.
        var main = GameObject.Find("Main Camera").GetComponent<Main>();
        if (face.Bounds.center.x < (Screen.width / 2)) {
            main.rotateArmToLeft();
        } else {
            main.rotateArmToRight();
        }
    }
}
/// <summary>
/// Per-frame hook: detects faces in the webcam feed and logs each one.
/// </summary>
void Update() {
    if (!CoreXT.IsDevice || !webCam.didUpdateThisFrame)
        return;

    // Map the WebCamTexture rotation angle onto a CGImage orientation.
    CGImageOrientation orientation;
    switch (webCam.videoRotationAngle) {
    case 0:
        orientation = CGImageOrientation.Default;
        break;
    case 180:
        orientation = CGImageOrientation.UpsideDown;
        break;
    case 270:
        orientation = CGImageOrientation.RotatedRight;
        break;
    case 90:
    default:
        // 90° was also the original fallback for unexpected angles.
        orientation = CGImageOrientation.RotatedLeft;
        break;
    }

    var ciimage = new CIImage(CGImage.FromWebCamTexture(webCam));
    // BUGFIX: Screen.width / webCam.width was integer division, which
    // truncates the scale (e.g. 1136/640 -> 1); cast to float first.
    faceDetector.ProjectedScale = (float)Screen.width / webCam.width;
    faces = faceDetector.DetectInImage(ciimage, orientation);
    foreach (var face in faces) {
        Log("face: " + face.Bounds + ", " + face.HasMouthPosition + ", " + face.LeftEyePosition + ", " + face.RightEyePosition);
    }
}
/// <summary>
/// Transforms a feature rect from Core Image coordinates (bottom-left
/// origin, unrotated) into top-left-origin, upright coordinates,
/// compensating for rotation and mirroring, then applies the projection
/// scale.
/// </summary>
/// <returns>The corrected rect.</returns>
/// <param name="rect">Rect as reported by the detector.</param>
/// <param name="imageHeight">Height of the detected image in pixels.</param>
/// <param name="imageWidth">Width of the detected image in pixels.</param>
/// <param name="imageOrientation">Orientation the image was captured in.</param>
private Rect _FixRect(Rect rect, int imageHeight, int imageWidth, CGImageOrientation imageOrientation) {
    // Core Image uses a bottom-left origin; flip to top-left.
    rect.y = imageHeight - rect.y - rect.height;
    // rotate coords
    if (correctOrientation) {
        float temp;
        switch (imageOrientation) {
        case CGImageOrientation.UpsideDown:
            rect.x = imageWidth - rect.x - rect.width;
            rect.y = imageHeight - rect.y - rect.height;
            if (isMirrored) {
                // BUGFIX: mirror around the width and account for the rect's
                // own width (previously `imageHeight - rect.x`).
                rect.x = imageWidth - rect.x - rect.width;
            }
            break;
        case CGImageOrientation.RotatedLeft:
            temp = rect.x;
            rect.x = imageHeight - rect.y - rect.height;
            rect.y = temp;
            temp = rect.width;
            rect.width = rect.height;
            rect.height = temp;
            if (isMirrored) {
                // BUGFIX: after the 90° swap x spans [0, imageHeight]; mirror
                // around the height and subtract the (swapped) width.
                rect.x = imageHeight - rect.x - rect.width;
            }
            break;
        case CGImageOrientation.RotatedRight:
            temp = rect.y;
            rect.y = imageWidth - rect.x - rect.width;
            rect.x = temp;
            temp = rect.width;
            rect.width = rect.height;
            rect.height = temp;
            if (isMirrored) {
                // BUGFIX: after the 90° swap x spans [0, imageHeight]; mirror
                // around the height and subtract the (swapped) width.
                rect.x = imageHeight - rect.x - rect.width;
            }
            break;
        default:
            if (isMirrored) {
                // BUGFIX: mirror around the width and account for the rect's
                // own width (previously `imageHeight - rect.x`).
                rect.x = imageWidth - rect.x - rect.width;
            }
            break;
        }
    }
    // scale into projected (screen) coordinates
    if (_finalScale > 1.0) {
        rect.x *= _finalScale;
        rect.y *= _finalScale;
        rect.width *= _finalScale;
        rect.height *= _finalScale;
    }
    return(rect);
}
/// <summary>
/// Converts a detector rect (bottom-left origin, unrotated) into an
/// upright, top-left-origin rect, undoing rotation and mirroring, then
/// scales it into projected coordinates.
/// </summary>
/// <returns>The corrected rect.</returns>
/// <param name="rect">Rect as reported by the detector.</param>
/// <param name="imageHeight">Height of the detected image in pixels.</param>
/// <param name="imageWidth">Width of the detected image in pixels.</param>
/// <param name="imageOrientation">Orientation the image was captured in.</param>
private Rect _FixRect(Rect rect, int imageHeight, int imageWidth, CGImageOrientation imageOrientation) {
    // Core Image reports rects with a bottom-left origin; flip vertically.
    rect.y = imageHeight - rect.y - rect.height;

    if (correctOrientation) {
        float swap;
        switch (imageOrientation) {
        case CGImageOrientation.UpsideDown:
            // undo a 180° rotation
            rect.x = imageWidth - rect.x - rect.width;
            rect.y = imageHeight - rect.y - rect.height;
            if (isMirrored) {
                rect.x = imageWidth - rect.x - rect.width;
            }
            break;
        case CGImageOrientation.RotatedLeft:
            // undo a 90° rotation: axes and dimensions trade places
            swap = rect.x;
            rect.x = imageHeight - rect.y - rect.height;
            rect.y = swap;
            swap = rect.width;
            rect.width = rect.height;
            rect.height = swap;
            if (isMirrored) {
                rect.x = imageHeight - rect.x - rect.width;
            }
            break;
        case CGImageOrientation.RotatedRight:
            // undo a -90° rotation: axes and dimensions trade places
            swap = rect.y;
            rect.y = imageWidth - rect.x - rect.width;
            rect.x = swap;
            swap = rect.width;
            rect.width = rect.height;
            rect.height = swap;
            if (isMirrored) {
                rect.x = imageHeight - rect.x - rect.width;
            }
            break;
        default:
            if (isMirrored) {
                rect.x = imageWidth - rect.x - rect.width;
            }
            break;
        }
    }

    // scale up into projected (screen) coordinates
    if (_finalScale > 1.0) {
        rect.x *= _finalScale;
        rect.y *= _finalScale;
        rect.width *= _finalScale;
        rect.height *= _finalScale;
    }
    return rect;
}
/// <summary>
/// Converts a detector point (bottom-left origin, unrotated) into an
/// upright, top-left-origin point, undoing rotation and mirroring, then
/// scales it into projected coordinates.
/// </summary>
/// <returns>The corrected point.</returns>
/// <param name="point">Point as reported by the detector.</param>
/// <param name="imageHeight">Height of the detected image in pixels.</param>
/// <param name="imageWidth">Width of the detected image in pixels.</param>
/// <param name="imageOrientation">Orientation the image was captured in.</param>
private Vector2 _FixPoint(Vector2 point, int imageHeight, int imageWidth, CGImageOrientation imageOrientation) {
    // Core Image reports points with a bottom-left origin; flip vertically.
    point.y = imageHeight - point.y;

    if (correctOrientation) {
        float swap;
        switch (imageOrientation) {
        case CGImageOrientation.UpsideDown:
            // undo a 180° rotation
            point.x = imageWidth - point.x;
            point.y = imageHeight - point.y;
            if (isMirrored) {
                point.x = imageWidth - point.x;
            }
            break;
        case CGImageOrientation.RotatedLeft:
            // undo a 90° rotation: x/y trade places
            swap = point.x;
            point.x = imageHeight - point.y;
            point.y = swap;
            if (isMirrored) {
                point.x = imageHeight - point.x;
            }
            break;
        case CGImageOrientation.RotatedRight:
            // undo a -90° rotation: x/y trade places
            swap = point.y;
            point.y = imageWidth - point.x;
            point.x = swap;
            if (isMirrored) {
                point.x = imageHeight - point.x;
            }
            break;
        default:
            if (isMirrored) {
                point.x = imageWidth - point.x;
            }
            break;
        }
    }

    // scale up into projected (screen) coordinates
    if (_finalScale > 1.0) {
        point.x *= _finalScale;
        point.y *= _finalScale;
    }
    return point;
}
/// <summary>
/// Detects faces in raw 32-bit pixel data.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="data">Pixel data.</param>
/// <param name="width">Width of image.</param>
/// <param name="height">Height of image.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInPixels32(Color32[] data, int width, int height, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    // Wrap the pixels in a Core Image image (downscaled by
    // preprocessImageScale) and delegate to the CIImage overload.
    return DetectInImage(new CIImage(CGImage.FromPixels32(data, width, height, preprocessImageScale)), imageOrientation);
}
/* public void DetectInPixels32Async(Action<Face[]> callback, Color32[] data, int width, int height, CGImageOrientation imageOrientation = CGImageOrientation.Default) { if (_opQueue == null) _opQueue = new NSOperationQueue(); var cgimage = CGImage.FromPixels32(data, width, height, preprocessImageScale); data = null; _opQueue.AddOperation(delegate() { var ciimage = new CIImage(cgimage); cgimage = null; Face[] faces = DetectInImage(ciimage, imageOrientation); ciimage = null; CoreXT.RunOnMainThread(delegate() { callback(faces); faces = null; }); }); } */
/// <summary>
/// Detects faces in a Core Image image, transforming every reported
/// coordinate into top-left-origin, upright, projected space via
/// _FixRect/_FixPoint.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="ciimage">Image to scan.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(CIImage ciimage, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    var rect = ciimage.Extent();
    int imageHeight = (int)rect.height;
    int imageWidth = (int)rect.width;
    // Combined scale used later by _FixRect/_FixPoint to project results.
    _finalScale = projectedScale / preprocessImageScale;
    // options: orientation plus the smile/blink classifier toggles
    _imageOpts[CIDetector.ImageOrientation] = (int)imageOrientation;
    _imageOpts[CIDetector.Smile] = detectSmiles;
    _imageOpts[CIDetector.EyeBlink] = detectBlinks;
    // detect
    var features = _detector.FeaturesInImage(ciimage, _imageOpts);
    // go through features and transform coords
    var faces = new Face[features.Length];
    for (int i=0; i<features.Length; i++) {
        var feature = features[i] as CIFaceFeature;
        var face = new Face();
        face.bounds = _FixRect(feature.bounds, imageHeight, imageWidth, imageOrientation);
        // Eye/mouth positions are optional; copy only when present.
        if (feature.hasMouthPosition) {
            face.hasMouthPosition = true;
            face.mouthPosition = _FixPoint(feature.mouthPosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasLeftEyePosition) {
            face.hasLeftEyePosition = true;
            face.leftEyePosition = _FixPoint(feature.leftEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasRightEyePosition) {
            face.hasRightEyePosition = true;
            face.rightEyePosition = _FixPoint(feature.rightEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        // Guarded by RespondsToSelector — presumably these properties only
        // exist on newer OS versions; verify against the binding docs.
        if (feature.RespondsToSelector("trackingID")) {
            if (feature.hasTrackingID) {
                face.hasTrackingID = true;
                face.trackingID = feature.trackingID;
            }
            if (feature.hasTrackingFrameCount) {
                face.hasTrackingFrameCount = true;
                face.trackingFrameCount = feature.trackingFrameCount;
            }
        }
        // NOTE(review): smile/eye-closed flags are copied under the
        // "faceAngle" selector guard — confirm they are introduced together.
        if (feature.RespondsToSelector("faceAngle")) {
            if (feature.hasFaceAngle) {
                face.hasFaceAngle = true;
                face.faceAngle = feature.faceAngle;
            }
            face.hasSmile = feature.hasSmile;
            face.leftEyeClosed = feature.leftEyeClosed;
            face.rightEyeClosed = feature.rightEyeClosed;
        }
        faces[i] = face;
    }
    return faces;
}
/* * public void DetectInPixels32Async(Action<Face[]> callback, Color32[] data, int width, int height, CGImageOrientation imageOrientation = CGImageOrientation.Default) { * if (_opQueue == null) * _opQueue = new NSOperationQueue(); * * var cgimage = CGImage.FromPixels32(data, width, height, preprocessImageScale); * data = null; * * _opQueue.AddOperation(delegate() { * var ciimage = new CIImage(cgimage); * cgimage = null; * Face[] faces = DetectInImage(ciimage, imageOrientation); * ciimage = null; * * CoreXT.RunOnMainThread(delegate() { * callback(faces); * faces = null; * }); * }); * } */
/// <summary>
/// Detects faces in a Core Image image, transforming every reported
/// coordinate into top-left-origin, upright, projected space via
/// _FixRect/_FixPoint.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="ciimage">Image to scan.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(CIImage ciimage, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    var rect = ciimage.Extent();
    int imageHeight = (int)rect.height;
    int imageWidth = (int)rect.width;
    // Combined scale used later by _FixRect/_FixPoint to project results.
    _finalScale = projectedScale / preprocessImageScale;
    // options: orientation plus the smile/blink classifier toggles
    _imageOpts[CIDetector.ImageOrientation] = (int)imageOrientation;
    _imageOpts[CIDetector.Smile] = detectSmiles;
    _imageOpts[CIDetector.EyeBlink] = detectBlinks;
    // detect
    var features = _detector.FeaturesInImage(ciimage, _imageOpts);
    // go through features and transform coords
    var faces = new Face[features.Length];
    for (int i = 0; i < features.Length; i++) {
        var feature = features[i] as CIFaceFeature;
        var face = new Face();
        face.bounds = _FixRect(feature.bounds, imageHeight, imageWidth, imageOrientation);
        // Eye/mouth positions are optional; copy only when present.
        if (feature.hasMouthPosition) {
            face.hasMouthPosition = true;
            face.mouthPosition = _FixPoint(feature.mouthPosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasLeftEyePosition) {
            face.hasLeftEyePosition = true;
            face.leftEyePosition = _FixPoint(feature.leftEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasRightEyePosition) {
            face.hasRightEyePosition = true;
            face.rightEyePosition = _FixPoint(feature.rightEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        // Guarded by RespondsToSelector — presumably these properties only
        // exist on newer OS versions; verify against the binding docs.
        if (feature.RespondsToSelector("trackingID")) {
            if (feature.hasTrackingID) {
                face.hasTrackingID = true;
                face.trackingID = feature.trackingID;
            }
            if (feature.hasTrackingFrameCount) {
                face.hasTrackingFrameCount = true;
                face.trackingFrameCount = feature.trackingFrameCount;
            }
        }
        // NOTE(review): smile/eye-closed flags are copied under the
        // "faceAngle" selector guard — confirm they are introduced together.
        if (feature.RespondsToSelector("faceAngle")) {
            if (feature.hasFaceAngle) {
                face.hasFaceAngle = true;
                face.faceAngle = feature.faceAngle;
            }
            face.hasSmile = feature.hasSmile;
            face.leftEyeClosed = feature.leftEyeClosed;
            face.rightEyeClosed = feature.rightEyeClosed;
        }
        faces[i] = face;
    }
    return(faces);
}
/// <summary>
/// Detects faces in a Unity texture (no preprocessing scale applied).
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="image">Source texture.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(Texture2D image, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    // NOTE(review): the intermediate CGImage is never explicitly released
    // here (a Release() call was commented out) — confirm the binding layer
    // manages its lifetime.
    return DetectInImage(new CIImage(CGImage.FromTexture2D(image)), imageOrientation);
}
/// <summary>
/// Detects faces in a Core Image image and maps every reported
/// coordinate into top-left-origin, upright, projected space via
/// _FixRect/_FixPoint.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="ciimage">Image to scan.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(CIImage ciimage, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    var extent = ciimage.Extent();
    int imageHeight = (int)(extent.height * PreprocessImageScale);
    int imageWidth = (int)(extent.width * PreprocessImageScale);
    // Combined scale consumed later by _FixRect/_FixPoint.
    _finalScale = ProjectedScale / PreprocessImageScale;

    // tell the detector how the image content is oriented
    _imageOpts[CIDetector.ImageOrientation] = (int)imageOrientation;

    var found = _detector.FeaturesInImage(ciimage, _imageOpts);

    // copy each CIFaceFeature into a Face, fixing up coordinate spaces
    var result = new Face[found.Length];
    for (int i = 0; i < found.Length; i++) {
        var feature = found[i] as CIFaceFeature;
        var face = new Face();
        face.bounds = _FixRect(feature.bounds, imageHeight, imageWidth, imageOrientation);
        // mouth/eye positions are optional; copy only when present
        if (feature.hasMouthPosition) {
            face.hasMouthPosition = true;
            face.mouthPosition = _FixPoint(feature.mouthPosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasLeftEyePosition) {
            face.hasLeftEyePosition = true;
            face.leftEyePosition = _FixPoint(feature.leftEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasRightEyePosition) {
            face.hasRightEyePosition = true;
            face.rightEyePosition = _FixPoint(feature.rightEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        // tracking is guarded by RespondsToSelector — presumably only
        // available on newer OS versions
        if (feature.RespondsToSelector("trackingID")) {
            if (feature.hasTrackingID) {
                face.hasTrackingID = true;
                face.trackingID = feature.trackingID;
            }
            if (feature.hasTrackingFrameCount) {
                face.hasTrackingFrameCount = true;
                face.trackingFrameCount = feature.trackingFrameCount;
            }
        }
        result[i] = face;
    }
    return result;
}
/// <summary>
/// Detects faces in a Core Image image and maps every reported
/// coordinate into top-left-origin, upright, projected space via
/// _FixRect/_FixPoint.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="ciimage">Image to scan.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(CIImage ciimage, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    var extent = ciimage.Extent();
    int imageHeight = (int)(extent.height * preprocessImageScale);
    int imageWidth = (int)(extent.width * preprocessImageScale);
    // Combined scale consumed later by _FixRect/_FixPoint.
    _finalScale = projectedScale / preprocessImageScale;

    // tell the detector how the image content is oriented
    _imageOpts[CIDetector.ImageOrientation] = (int)imageOrientation;

    var found = _detector.FeaturesInImage(ciimage, _imageOpts);

    // copy each CIFaceFeature into a Face, fixing up coordinate spaces
    var result = new Face[found.Length];
    for (int i = 0; i < found.Length; i++) {
        var feature = found[i] as CIFaceFeature;
        var face = new Face();
        face.bounds = _FixRect(feature.bounds, imageHeight, imageWidth, imageOrientation);
        // mouth/eye positions are optional; copy only when present
        if (feature.hasMouthPosition) {
            face.hasMouthPosition = true;
            face.mouthPosition = _FixPoint(feature.mouthPosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasLeftEyePosition) {
            face.hasLeftEyePosition = true;
            face.leftEyePosition = _FixPoint(feature.leftEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        if (feature.hasRightEyePosition) {
            face.hasRightEyePosition = true;
            face.rightEyePosition = _FixPoint(feature.rightEyePosition, imageHeight, imageWidth, imageOrientation);
        }
        // tracking is guarded by RespondsToSelector — presumably only
        // available on newer OS versions
        if (feature.RespondsToSelector("trackingID")) {
            if (feature.hasTrackingID) {
                face.hasTrackingID = true;
                face.trackingID = feature.trackingID;
            }
            if (feature.hasTrackingFrameCount) {
                face.hasTrackingFrameCount = true;
                face.trackingFrameCount = feature.trackingFrameCount;
            }
        }
        result[i] = face;
    }
    return result;
}
/// <summary>
/// Detects faces in a Unity texture.
/// </summary>
/// <returns>The detected faces.</returns>
/// <param name="image">Source texture.</param>
/// <param name="imageOrientation">Orientation of the image content.</param>
public Face[] DetectInImage(Texture2D image, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
    // Convert to a Core Graphics image (downscaled by preprocessImageScale),
    // wrap in a Core Image image, and delegate to the CIImage overload.
    var ciimage = new CIImage(CGImage.FromTexture2D(image, preprocessImageScale));
    return DetectInImage(ciimage, imageOrientation);
}