Example #1
        private void HandleRectangles(VNRequest request, NSError error)
        {
            var observations = request.GetResults<VNRectangleObservation>();

            if (observations == null)
            {
                ShowAlert("Processing Error", "Unexpected result type from VNDetectRectanglesRequest.");
                return;
            }
            if (observations.Length < 1)
            {
                DispatchQueue.MainQueue.DispatchAsync(() => {
                    ClassificationLabel.Text = "No rectangles detected.";
                });
                return;
            }
            var detectedRectangle = observations[0];
            var imageSize         = InputImage.Extent.Size;

            // Verify detected rectangle is valid.
            var boundingBox = detectedRectangle.BoundingBox.Scaled(imageSize);

            if (!InputImage.Extent.Contains(boundingBox))
            {
                DispatchQueue.MainQueue.DispatchAsync(() => {
                    ClassificationLabel.Text = "Invalid rectangle detected.";
                });
                return;
            }

            // Rectify the detected rectangle and reduce it to inverted grayscale before running the model.
            var topLeft     = detectedRectangle.TopLeft.Scaled(imageSize);
            var topRight    = detectedRectangle.TopRight.Scaled(imageSize);
            var bottomLeft  = detectedRectangle.BottomLeft.Scaled(imageSize);
            var bottomRight = detectedRectangle.BottomRight.Scaled(imageSize);

            var correctedImage = InputImage.ImageByCroppingToRect(boundingBox);

            var fp1 = new Dictionary<string, CGPoint>()
            {
                { "inputTopLeft", topLeft },
                { "inputTopRight", topRight },
                { "inputBottomLeft", bottomLeft },
                { "inputBottomRight", bottomRight }
            };

            correctedImage = correctedImage.CreateByFiltering("CIPerspectiveCorrection", fp1.ToNSDictionary());

            var fp2 = new Dictionary<NSString, NSNumber>()
            {
                { CIFilterInputKey.Saturation, new NSNumber(0) },
                { CIFilterInputKey.Contrast, new NSNumber(32) }
            };

            correctedImage = correctedImage.CreateByFiltering("CIColorControls", fp2.ToNSDictionary());

            // CIColorInvert takes no parameters beyond the input image, so pass an empty dictionary.
            var fp3 = new Dictionary<NSString, NSNumber>();

            correctedImage = correctedImage.CreateByFiltering("CIColorInvert", fp3.ToNSDictionary());

            // Show the pre-processed image
            DispatchQueue.MainQueue.DispatchAsync(() =>
            {
                ClassificationLabel.Text = "Selected First Rectangle";
                CorrectedImageView.Image = new UIImage(correctedImage);
            });

            // Run the Core ML MNIST classifier; results are handled in the HandleClassification callback.
            var handler = new VNImageRequestHandler(correctedImage, new VNImageOptions());

            DispatchQueue.DefaultGlobalQueue.DispatchAsync(() => {
                NSError err;
                handler.Perform(new VNRequest[] { ClassificationRequest }, out err);
            });
        }
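
The Scaled and ToNSDictionary calls above appear to be helper extension methods from the sample rather than framework APIs. A minimal sketch of the two ToNSDictionary overloads this example assumes might look like the following; the class name DictionaryExtensions and the exact implementation are illustrative, not the sample's actual code. The CGPoint overload wraps each corner in a CIVector, since Core Image filters such as CIPerspectiveCorrection expect their point parameters as CIVector values.

using System.Collections.Generic;
using CoreGraphics;
using CoreImage;
using Foundation;

// Hypothetical helper extensions assumed by the example above.
public static class DictionaryExtensions
{
    // CIPerspectiveCorrection expects its corner parameters as CIVector values,
    // so each CGPoint is wrapped in a CIVector before building the NSDictionary.
    public static NSDictionary ToNSDictionary(this Dictionary<string, CGPoint> self)
    {
        var keys = new List<NSString>();
        var values = new List<CIVector>();
        foreach (var pair in self)
        {
            keys.Add(new NSString(pair.Key));
            values.Add(new CIVector(pair.Value.X, pair.Value.Y));
        }
        return NSDictionary.FromObjectsAndKeys(values.ToArray(), keys.ToArray());
    }

    // NSString keys and NSNumber values are already NSObjects, so they can be
    // handed to NSDictionary.FromObjectsAndKeys unchanged.
    public static NSDictionary ToNSDictionary(this Dictionary<NSString, NSNumber> self)
    {
        var keys = new List<NSString>();
        var values = new List<NSNumber>();
        foreach (var pair in self)
        {
            keys.Add(pair.Key);
            values.Add(pair.Value);
        }
        return NSDictionary.FromObjectsAndKeys(values.ToArray(), keys.ToArray());
    }
}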
Example #2

// Thin wrappers over the native Application Insights MSAITelemetryManager binding;
// each converts the managed Dictionary<string, string> to an NSDictionary before forwarding.
public static void TrackPageView(string pageName, int duration, Dictionary<string, string> properties)
{
    MSAITelemetryManager.TrackPageView(pageName, duration, properties.ToNSDictionary());
}

public static void TrackTrace(string message, Dictionary<string, string> properties)
{
    MSAITelemetryManager.TrackTrace(message, properties.ToNSDictionary());
}

public static void TrackMetric(string metricName, double value, Dictionary<string, string> properties)
{
    MSAITelemetryManager.TrackMetric(metricName, value, properties.ToNSDictionary());
}

public static void TrackEvent(string eventName, Dictionary<string, string> properties)
{
    MSAITelemetryManager.TrackEvent(eventName, properties.ToNSDictionary());
}
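
These wrappers rely on a Dictionary<string, string> overload of the same ToNSDictionary extension sketched above. A call site would pass an ordinary managed dictionary and let the wrapper handle the conversion; the class name Telemetry below is illustrative, standing in for whatever static class actually hosts the methods shown in this example.

using System.Collections.Generic;

// Hypothetical call site for the wrapper methods above.
var properties = new Dictionary<string, string>
{
    { "screen", "Checkout" },
    { "paymentMethod", "card" }
};

Telemetry.TrackEvent("PurchaseCompleted", properties);
Telemetry.TrackMetric("CheckoutDurationSeconds", 12.4, properties);
Telemetry.TrackPageView("CheckoutPage", 35, properties);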