Example #1
        void DetectLandmarks(CIImage image)
        {
            faceLandmarksDetectionRequest.Perform(new VNRequest[] { faceLandmarks }, image, out var performError);
            if (performError != null)
            {
                Console.WriteLine($"Landmark detection failed: {performError.LocalizedDescription}");
                return;
            }

            var landmarksResults = faceLandmarks?.GetResults<VNFaceObservation>() ?? Array.Empty<VNFaceObservation>();

            foreach (var observation in landmarksResults)
            {
                DispatchQueue.MainQueue.DispatchAsync(() =>
                {
                    // Use this observation's own bounding box so that, with multiple
                    // faces, each set of landmarks maps into the correct rectangle.
                    var boundingBox = observation.BoundingBox;
                    var landmarks   = observation.Landmarks;
                    if (landmarks == null)
                    {
                        return;
                    }

                    // Convert every landmark region from normalized face-space
                    // coordinates into view coordinates.
                    var regions = new[]
                    {
                        landmarks.FaceContour,
                        landmarks.LeftEye, landmarks.RightEye,
                        landmarks.LeftEyebrow, landmarks.RightEyebrow,
                        landmarks.Nose, landmarks.NoseCrest,
                        landmarks.InnerLips, landmarks.OuterLips
                    };

                    foreach (var region in regions)
                    {
                        ConvertPoints2(region, boundingBox, (nuint)View.Bounds.Width, (nuint)View.Bounds.Height);
                    }
                });
            }
        }
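The ConvertPoints2 helper called above is not shown in this example. The sketch below is a guess at its shape, assuming it maps a landmark region's normalized points (relative to the face bounding box, with Vision's bottom-left origin) into top-left-origin view coordinates; the signature mirrors the calls above, but the body is illustrative only.

        // Illustrative sketch of the helper assumed above, not the sample's actual code.
        void ConvertPoints2(VNFaceLandmarkRegion2D region, CGRect boundingBox, nuint width, nuint height)
        {
            if (region == null)
                return;

            double w = width;
            double h = height;
            foreach (var normalized in region.NormalizedPoints)
            {
                // Scale into the face box, then flip the y-axis for UIKit.
                var point = new CGPoint(
                    (boundingBox.X + normalized.X * boundingBox.Width) * w,
                    (1 - (boundingBox.Y + normalized.Y * boundingBox.Height)) * h);

                // Draw or collect `point` here, e.g. add it to a CAShapeLayer path.
            }
        }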
Example #2
        void DetectFace(CIImage image)
        {
            faceDetectionRequest.Perform(new VNRequest[] { faceDetection }, image, out var performError);
            if (performError != null)
            {
                Console.WriteLine($"Face detection failed: {performError.LocalizedDescription}");
                return;
            }

            var results = faceDetection.GetResults<VNFaceObservation>() ?? Array.Empty<VNFaceObservation>();

            if (results.Length > 0)
            {
                // Hand the detected faces to the landmarks request, then clear
                // the previous frame's overlay on the main queue.
                faceLandmarks.InputFaceObservations = results;
                DetectLandmarks(image);

                DispatchQueue.MainQueue.DispatchAsync(() => shapeLayer.Sublayers = Array.Empty<CALayer>());
            }
        }
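Examples #1 and #2 use the fields faceDetection, faceLandmarks, faceDetectionRequest, and faceLandmarksDetectionRequest without showing how they are created. A plausible setup, assuming they are instance fields, is sketched below; completion handlers are omitted because results are read back synchronously with GetResults after Perform.

        // Assumed field setup for the two examples above; not the sample's actual code.
        VNDetectFaceRectanglesRequest faceDetection;
        VNDetectFaceLandmarksRequest faceLandmarks;
        VNSequenceRequestHandler faceDetectionRequest;
        VNSequenceRequestHandler faceLandmarksDetectionRequest;

        void SetupVision()
        {
            faceDetection = new VNDetectFaceRectanglesRequest(null);
            faceLandmarks = new VNDetectFaceLandmarksRequest(null);
            faceDetectionRequest = new VNSequenceRequestHandler();
            faceLandmarksDetectionRequest = new VNSequenceRequestHandler();
        }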
Example #3
        void DetectFaceLandmarks(CIImage imageWithOrientation)
        {
            if (_detectFaceLandmarksRequest == null)
            {
                _detectFaceLandmarksRequest = new VNDetectFaceLandmarksRequest((request, error) =>
                {
                    RemoveSublayers(_shapeLayer);

                    if (error != null)
                    {
                        throw new Exception(error.LocalizedDescription);
                    }

                    // Guard against a null result set before iterating
                    var results = request.GetResults<VNFaceObservation>() ?? Array.Empty<VNFaceObservation>();

                    foreach (var result in results)
                    {
                        if (result.Landmarks == null)
                        {
                            continue;
                        }

                        var boundingBox       = result.BoundingBox;
                        var scaledBoundingBox = Scale(boundingBox, _view.Bounds.Size);

                        InvokeOnMainThread(() =>
                        {
                            DrawLandmark(result.Landmarks.FaceContour, scaledBoundingBox, false, UIColor.White);

                            DrawLandmark(result.Landmarks.LeftEye, scaledBoundingBox, true, UIColor.Green);
                            DrawLandmark(result.Landmarks.RightEye, scaledBoundingBox, true, UIColor.Green);

                            DrawLandmark(result.Landmarks.Nose, scaledBoundingBox, true, UIColor.Blue);
                            DrawLandmark(result.Landmarks.NoseCrest, scaledBoundingBox, false, UIColor.Blue);

                            DrawLandmark(result.Landmarks.InnerLips, scaledBoundingBox, true, UIColor.Yellow);
                            DrawLandmark(result.Landmarks.OuterLips, scaledBoundingBox, true, UIColor.Yellow);

                            DrawLandmark(result.Landmarks.LeftEyebrow, scaledBoundingBox, false, UIColor.Blue);
                            DrawLandmark(result.Landmarks.RightEyebrow, scaledBoundingBox, false, UIColor.Blue);
                        });
                    }
                });
            }

            _sequenceRequestHandler.Perform(new[] { _detectFaceLandmarksRequest }, imageWithOrientation, out var requestHandlerError);
            if (requestHandlerError != null)
            {
                throw new Exception(requestHandlerError.LocalizedDescription);
            }
        }
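The Scale and DrawLandmark helpers used in this example are not shown. The sketch below illustrates what they might do, assuming Scale expands a normalized Vision rectangle to view dimensions and DrawLandmark strokes a region's points into _shapeLayer; the bodies are illustrative, not the sample's implementation.

        // Illustrative helpers only; names and parameters mirror the calls above.
        static CGRect Scale(CGRect normalized, CGSize size)
        {
            // Vision bounding boxes are normalized to [0, 1] in both dimensions.
            return new CGRect(normalized.X * size.Width,
                              normalized.Y * size.Height,
                              normalized.Width * size.Width,
                              normalized.Height * size.Height);
        }

        void DrawLandmark(VNFaceLandmarkRegion2D landmark, CGRect scaledBoundingBox, bool closePath, UIColor color)
        {
            if (landmark == null)
                return;

            var path = new UIBezierPath();
            var points = landmark.NormalizedPoints;
            for (var i = 0; i < points.Length; i++)
            {
                // Landmark points are normalized to the face bounding box.
                var point = new CGPoint(
                    scaledBoundingBox.X + points[i].X * scaledBoundingBox.Width,
                    scaledBoundingBox.Y + points[i].Y * scaledBoundingBox.Height);
                if (i == 0)
                    path.MoveTo(point);
                else
                    path.AddLineTo(point);
            }

            if (closePath)
                path.ClosePath();

            _shapeLayer.AddSublayer(new CAShapeLayer
            {
                Path        = path.CGPath,
                StrokeColor = color.CGColor,
                FillColor   = UIColor.Clear.CGColor,
                LineWidth   = 2
            });
        }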
Example #4
        /// <summary>
        /// Called by `ViewController.OnFrameCaptured` once per frame with the buffer processed by the image-processing pipeline in
        /// `VideoCaptureDelegate.DidOutputSampleBuffer`
        /// </summary>
        /// <param name="buffer">The captured video frame.</param>
        public void OnFrameCaptured(CVPixelBuffer buffer)
        {
            // Run the tracker
            var request = new VNTrackObjectRequest(trackedRectangle, ObjectTracked);

            request.TrackingLevel = VNRequestTrackingLevel.Accurate;
            NSError error;
            var     requests = new[] { request };

            overlay.InvokeOnMainThread(() => overlay.Clear());
            trackingHandler.Perform(requests, buffer, out error);
            if (error != null)
            {
                InvokeOnMainThread(() => overlay.Message = error.ToString());
            }
        }
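ObjectTracked is the completion handler handed to VNTrackObjectRequest above; its body is not part of this example. A hypothetical handler, assuming trackedRectangle is a mutable field that seeds the next frame's request, could look like this:

        // Hypothetical completion handler; the sample's actual ObjectTracked is not shown.
        void ObjectTracked(VNRequest request, NSError error)
        {
            if (error != null)
            {
                InvokeOnMainThread(() => overlay.Message = error.ToString());
                return;
            }

            if (request.GetResults<VNDetectedObjectObservation>()?.FirstOrDefault() is VNDetectedObjectObservation observation)
            {
                // Feed the newest observation back in so the next frame tracks from it.
                trackedRectangle = observation;
            }
        }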
        /// <summary>
        /// Performs the tracking requests frame by frame over the video asset until the video
        /// ends or cancellation is requested, reporting results through the delegate.
        /// </summary>
        /// <param name="type">Whether to track generic objects or rectangles.</param>
        /// <param name="error">Set to a non-null error if reading or tracking failed.</param>
        public void PerformTracking(TrackedObjectType type, out NSError error)
        {
            var videoReader = VideoReader.Create(videoAsset);

            if (videoReader != null)
            {
                if (videoReader.NextFrame() != null)
                {
                    this.cancelRequested = false;

                    // Create initial observations
                    var inputObservations = new Dictionary<NSUuid, VNDetectedObjectObservation>();
                    var trackedObjects    = new Dictionary<NSUuid, TrackedPolyRect>();
                    switch (type)
                    {
                    case TrackedObjectType.Object:
                        foreach (var rect in this.ObjectsToTrack)
                        {
                            var inputObservation = VNDetectedObjectObservation.FromBoundingBox(rect.BoundingBox);
                            inputObservations[inputObservation.Uuid] = inputObservation;
                            trackedObjects[inputObservation.Uuid]    = rect;
                        }
                        break;

                    case TrackedObjectType.Rectangle:
                        foreach (var rectangleObservation in this.initialRectObservations)
                        {
                            inputObservations[rectangleObservation.Uuid] = rectangleObservation;
                            var rectColor = TrackedObjectsPalette.Color(trackedObjects.Count);
                            trackedObjects[rectangleObservation.Uuid] = new TrackedPolyRect(rectangleObservation, rectColor);
                        }
                        break;
                    }

                    var requestHandler = new VNSequenceRequestHandler();
                    var frames         = 1;
                    var trackingFailedForAtLeastOneObject = false;

                    CVPixelBuffer frame = null;
                    while (true)
                    {
                        if (this.cancelRequested || (frame = videoReader.NextFrame()) == null)
                        {
                            break;
                        }

                        this.Delegate?.DisplayFrameCounter(frames);
                        frames += 1;

                        var rects            = new List<TrackedPolyRect>();
                        var trackingRequests = new List<VNRequest>();
                        foreach (var inputObservation in inputObservations)
                        {
                            VNTrackingRequest request = null;
                            switch (type)
                            {
                            case TrackedObjectType.Object:
                                request = new VNTrackObjectRequest(inputObservation.Value);
                                break;

                            case TrackedObjectType.Rectangle:
                                if (inputObservation.Value is VNRectangleObservation rectObservation)
                                {
                                    request = new VNTrackRectangleRequest(rectObservation);
                                }
                                else
                                {
                                    continue;
                                }
                                break;
                            }

                            request.TrackingLevel = this.TrackingLevel;
                            trackingRequests.Add(request);
                        }

                        // Perform the batch of tracking requests for this frame
                        requestHandler.Perform(trackingRequests.ToArray(), frame, videoReader.Orientation, out NSError performError);

                        // Remember a failure on any frame, not just the most recent one
                        trackingFailedForAtLeastOneObject |= performError != null;

                        foreach (var processedRequest in trackingRequests)
                        {
                            var results = processedRequest.GetResults<VNObservation>();
                            if (results == null || !results.Any())
                            {
                                continue;
                            }

                            if (results.FirstOrDefault() is VNDetectedObjectObservation observation)
                            {
                                // Assume threshold = 0.5f
                                var rectStyle = observation.Confidence > 0.5f ? TrackedPolyRectStyle.Solid : TrackedPolyRectStyle.Dashed;
                                var knownRect = trackedObjects[observation.Uuid];

                                switch (type)
                                {
                                case TrackedObjectType.Object:
                                    rects.Add(new TrackedPolyRect(observation, knownRect.Color, rectStyle));
                                    break;

                                case TrackedObjectType.Rectangle:
                                    if (observation is VNRectangleObservation rectObservation)
                                    {
                                        rects.Add(new TrackedPolyRect(rectObservation, knownRect.Color, rectStyle));
                                    }
                                    break;
                                }

                                // Initialize inputObservation for the next iteration
                                inputObservations[observation.Uuid] = observation;
                            }
                        }

                        // Draw results
                        this.Delegate?.DisplayFrame(frame, videoReader.AffineTransform, rects);

                        // Sleep roughly one frame duration; FrameRateInSeconds is assumed to be frames per second
                        var milliseconds = 1000 / videoReader.FrameRateInSeconds;
                        System.Threading.Thread.Sleep((int)milliseconds);
                    }

                    this.Delegate?.DidFinifshTracking();

                    // Report a tracking failure rather than a first-frame read failure here;
                    // assumes VisionTrackerProcessorErrorType defines ObjectTrackingFailed,
                    // mirroring the error cases of the original Swift sample.
                    error = trackingFailedForAtLeastOneObject
                        ? new VisionTrackerProcessorError(VisionTrackerProcessorErrorType.ObjectTrackingFailed)
                        : null;
                }
                else
                {
                    error = new VisionTrackerProcessorError(VisionTrackerProcessorErrorType.FirstFrameReadFailed);
                }
            }
            else
            {
                error = new VisionTrackerProcessorError(VisionTrackerProcessorErrorType.ReaderInitializationFailed);
            }
        }
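PerformTracking blocks while it loops over frames, so a caller would normally run it off the main thread. A hypothetical call site, where visionProcessor is an assumed instance of the class above:

        // Hypothetical call site; visionProcessor is an assumed field.
        DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() =>
        {
            visionProcessor.PerformTracking(TrackedObjectType.Object, out NSError trackingError);
            if (trackingError != null)
            {
                Console.WriteLine(trackingError.LocalizedDescription);
            }
        });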