Example #1
        public async Task<bool> Recognise (CIImage image)
        {
            if (image == null)
                throw new ArgumentNullException ("image");
            if (_busy)
                return false;
            _busy = true;
            try {
                return await Task.Run (() => {
                    using (var blur = new CIGaussianBlur ())
                    using (var context = CIContext.Create ()) {
                        blur.SetDefaults ();
                        blur.Image = image;
                        blur.Radius = 0;
                        using (var outputCiImage = context.CreateCGImage (blur.OutputImage, image.Extent))
                        using (var newImage = new UIImage (outputCiImage)) {
                            _api.Image = newImage;
                            _api.Recognize ();
                            return true;
                        }
                    }

                });
            } finally {
                _busy = false;
            }
        }
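
A hedged usage sketch for the method above: because _busy is set before the first await, a second concurrent call is rejected immediately rather than queued (the recognizer and image variables are illustrative):

        // Hypothetical call site: the second call returns false while
        // the first still holds the _busy flag.
        Task<bool> first  = recognizer.Recognise (image);
        Task<bool> second = recognizer.Recognise (image);
        bool[] results = await Task.WhenAll (first, second);
        // results is expected to be { true, false }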
Example #2
        public async Task<bool> Recognise (CIImage image)
        {
            CheckIfInitialized ();
            if (image == null)
                throw new ArgumentNullException ("image");
            if (_busy)
                return false;
            _busy = true;
            try {
                return await Task.Run (() => {
                    using (var blur = new CIGaussianBlur ())
                    using (var context = CIContext.Create ()) {
                        blur.SetDefaults ();
                        blur.Image = image;
                        blur.Radius = 0;
                        using (var outputCiImage = context.CreateCGImage (blur.OutputImage, image.Extent))
                        using (var newImage = new UIImage (outputCiImage)) {
                            _size = newImage.Size;
                            _api.Image = newImage;
                            if (_rect.HasValue) {
                                _api.Rect = new CGRect (_rect.Value.Left, _rect.Value.Top, _rect.Value.Width, _rect.Value.Height);
                            } else {
                                _api.Rect = new CGRect (0, 0, _size.Width, _size.Height);
                            }
                            _api.Recognize ();
                            return true;
                        }
                    }

                });
            } finally {
                _busy = false;
            }
        }
Example #3
		private static UIImage CreateBlurImage (UIImage image)
		{
			using (CIImage inputImage = new CIImage (image))
			using (CIGaussianBlur blur = new CIGaussianBlur ())
			using (CIContext context = CIContext.FromOptions (new CIContextOptions { UseSoftwareRenderer = false })) {
				blur.Image = inputImage;
				blur.Radius = 3;
				using (CIImage outputImage = blur.OutputImage)
				using (CGImage cgImage = context.CreateCGImage (outputImage, new CGRect (new CGPoint (0, 0), image.Size))) {
					return UIImage.FromImage (cgImage);
				}
			}
		}
Example #4
        public void CIKernel_BasicTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Inconclusive("Custom filters require iOS8+");
            }

            Exception ex = null;
            var       t  = new Thread(() => {
                // This code will crash if an MKMapView has been created previously on
                // the same thread, so just run it on a different thread (MKMapViews can
                // only be created on the main thread). This is obviously an Apple bug,
                // and a radar has been filed: 19249153. ObjC test case: https://github.com/rolfbjarne/CIKernelMKMapViewCrash
                try {
                    UIImage uiImg = new UIImage("CoreImage/Xam.png");
                    CIImage ciImg = new CIImage(uiImg);

                    CIContext context = CIContext.FromOptions(null);

                    foreach (CustomerFilterType type in Enum.GetValues(typeof(CustomerFilterType)))
                    {
                        MyCustomFilter filter = new MyCustomFilter(type);
                        filter.MyImage        = ciImg;

                        CIImage outputImage = filter.OutputImage;

                        CGImage cgImage  = context.CreateCGImage(outputImage, outputImage.Extent);
                        UIImage finalImg = new UIImage(cgImage);
                        Assert.IsNotNull(finalImg, "CIKernel_BasicTest should not be null");
                        Assert.IsTrue(filter.CallbackHit, "CIKernel_BasicTest callback must be hit");
                        if (filter.IsColorKernel)
                        {
                            Assert.IsTrue(filter.kernel is CIColorKernel, "CIKernel_BasicTest we disagree that it is a color kernel");
                        }
                        else
                        {
                            Assert.IsTrue(filter.kernel is CIWarpKernel, "CIKernel_BasicTest we disagree that it is a warp kernel");
                        }
                    }
                } catch (Exception ex2) {
                    ex = ex2;
                }
            });

            t.Start();
            t.Join();
            if (ex != null)
            {
                throw ex;
            }
        }
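
The thread above exists only to capture and rethrow: exceptions thrown on a raw Thread would otherwise be lost. A minimal standalone sketch of the same capture-and-rethrow pattern, using System.Runtime.ExceptionServices to keep the original stack trace (which the snippet above does not):

        Exception captured = null;
        var worker = new Thread(() =>
        {
            try
            {
                // thread-affine work goes here
            }
            catch (Exception e)
            {
                captured = e;
            }
        });
        worker.Start();
        worker.Join();
        if (captured != null)
        {
            // unlike `throw captured;`, this preserves the original stack trace
            ExceptionDispatchInfo.Capture(captured).Throw();
        }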
Example #5
        public static NSImage Tint(this NSImage image, NSColor tint)
        {
            CIFilter colorGenerator = CIFilter.FromName("CIConstantColorGenerator");
            CIColor  color          = CIColor.FromCGColor(tint.ToCG());

            colorGenerator.SetValueForKey(color, CIFilterInputKey.Color);
            CIFilter colorFilter = CIFilter.FromName("CIColorControls");

            colorFilter.SetValueForKey(colorGenerator.ValueForKey(CIFilterOutputKey.Image), CIFilterInputKey.Image);
            colorFilter.SetValueForKey(NSNumber.FromFloat(3f), CIFilterInputKey.Saturation);
            colorFilter.SetValueForKey(NSNumber.FromFloat(0.35f), CIFilterInputKey.Brightness);
            colorFilter.SetValueForKey(NSNumber.FromFloat(1f), CIFilterInputKey.Contrast);

            CIFilter monochromeFilter = CIFilter.FromName("CIColorMonochrome");
            CIImage  baseImage        = CIImage.FromCGImage(image.CGImage);

            monochromeFilter.SetValueForKey(baseImage, CIFilterInputKey.Image);
            monochromeFilter.SetValueForKey(CIColor.FromRgb(0.75f, 0.75f, 0.75f), CIFilterInputKey.Color);
            monochromeFilter.SetValueForKey(NSNumber.FromFloat(1f), CIFilterInputKey.Intensity);

            CIFilter compositingFilter = CIFilter.FromName("CIMultiplyCompositing");

            compositingFilter.SetValueForKey(colorFilter.ValueForKey(CIFilterOutputKey.Image), CIFilterInputKey.Image);
            compositingFilter.SetValueForKey(monochromeFilter.ValueForKey(CIFilterOutputKey.Image), CIFilterInputKey.BackgroundImage);

            CIImage outputImage = (CIImage)compositingFilter.ValueForKey(CIFilterOutputKey.Image);
            var     extent      = outputImage.Extent;

            var newsize = sd.Size.Truncate(extent.Size);

            var tintedImage = new NSImage(newsize);
            var newrep      = new NSBitmapImageRep(IntPtr.Zero, newsize.Width, newsize.Height, 8, 4, true, false, NSColorSpace.DeviceRGB, 4 * newsize.Width, 32);

            tintedImage.AddRepresentation(newrep);

            var graphics = NSGraphicsContext.FromBitmap(newrep);

            NSGraphicsContext.GlobalSaveGraphicsState();
            NSGraphicsContext.CurrentContext = graphics;

            var ciContext = CIContext.FromContext(graphics.GraphicsPort, new CIContextOptions {
                UseSoftwareRenderer = true
            });

            ciContext.DrawImage(outputImage, extent, extent);

            NSGraphicsContext.GlobalRestoreGraphicsState();

            newrep.Size = image.Size;
            return(tintedImage);
        }
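
A hypothetical call site for the Tint extension above (image name and colour are illustrative):

        NSImage original = NSImage.ImageNamed("StatusTemplate");
        NSImage tinted   = original.Tint(NSColor.Red);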
Example #6
#if __IOS__
        UIImage ImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
#else
        NSImage ImageFromSampleBuffer(CMSampleBuffer sampleBuffer)
#endif
        {
#if __IOS__
            UIImage image;
#else
            NSImage image;
#endif

            Random r    = new Random();
            int    flag = r.Next();
            // Get the CoreVideo image
            using (CVPixelBuffer pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
            {
                // Lock the base address
                pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
                using (CIImage cIImage = new CIImage(pixelBuffer))
                {
#if __IOS__
                    image = new UIImage(cIImage);
#else
                    image = null;

                    AutoResetEvent e = new AutoResetEvent(false);
                    //e.WaitOne();

                    //Semaphore s = new Semaphore(1, 1);
                    //s.WaitOne();
                    BeginInvokeOnMainThread(delegate
                    {
                        NSCIImageRep rep = new NSCIImageRep(cIImage);
                        Debug.WriteLine(String.Format("({2}) NSCIImageRep: {0}x{1}", rep.Size.Width, rep.Size.Height, flag));
                        image = new NSImage(rep.Size);
                        image.AddRepresentation(rep);
                        //s.Release();
                        //Monitor.Exit(this);
                        Debug.WriteLine(String.Format("({2}) NSImage: {0}x{1}", image.Size.Width, image.Size.Height, flag));
                        e.Set();
                    });
                    e.WaitOne();
                    //Monitor.Enter(this);
                    //Monitor.Exit(this);
                    //s.WaitOne();
                    //s.Release();
#endif
                }
                pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
            }
            Debug.WriteLine(String.Format("({2}) Recevied NSImage: {0}x{1}", image.Size.Width, image.Size.Height, flag));
            return(image);
        }
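
The AutoResetEvent pattern above (queue work via BeginInvokeOnMainThread, then block until it signals) can be factored into a helper. A minimal sketch, assuming it is only ever called from a background thread; calling it on the main thread would deadlock (the helper name is illustrative):

        T RunOnMainThreadSync<T>(Func<T> work)
        {
            T result = default(T);
            using (var done = new AutoResetEvent(false))
            {
                BeginInvokeOnMainThread(() =>
                {
                    result = work();
                    done.Set();
                });
                done.WaitOne(); // deadlocks if already on the main thread
            }
            return result;
        }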
Example #7
        void DetectFaceLandmarks(CIImage imageWithOrientation)
        {
            if (_detectFaceLandmarksRequest == null)
            {
                _detectFaceLandmarksRequest = new VNDetectFaceLandmarksRequest((request, error) =>
                {
                    RemoveSublayers(_shapeLayer);

                    if (error != null)
                    {
                        throw new Exception(error.LocalizedDescription);
                    }

                    var results = request.GetResults <VNFaceObservation>();

                    foreach (var result in results)
                    {
                        if (result.Landmarks == null)
                        {
                            continue;
                        }

                        var boundingBox       = result.BoundingBox;
                        var scaledBoundingBox = Scale(boundingBox, _view.Bounds.Size);

                        InvokeOnMainThread(() =>
                        {
                            DrawLandmark(result.Landmarks.FaceContour, scaledBoundingBox, false, UIColor.White);

                            DrawLandmark(result.Landmarks.LeftEye, scaledBoundingBox, true, UIColor.Green);
                            DrawLandmark(result.Landmarks.RightEye, scaledBoundingBox, true, UIColor.Green);

                            DrawLandmark(result.Landmarks.Nose, scaledBoundingBox, true, UIColor.Blue);
                            DrawLandmark(result.Landmarks.NoseCrest, scaledBoundingBox, false, UIColor.Blue);

                            DrawLandmark(result.Landmarks.InnerLips, scaledBoundingBox, true, UIColor.Yellow);
                            DrawLandmark(result.Landmarks.OuterLips, scaledBoundingBox, true, UIColor.Yellow);

                            DrawLandmark(result.Landmarks.LeftEyebrow, scaledBoundingBox, false, UIColor.Blue);
                            DrawLandmark(result.Landmarks.RightEyebrow, scaledBoundingBox, false, UIColor.Blue);
                        });
                    }
                });
            }

            _sequenceRequestHandler.Perform(new[] { _detectFaceLandmarksRequest }, imageWithOrientation, out var requestHandlerError);
            if (requestHandlerError != null)
            {
                throw new Exception(requestHandlerError.LocalizedDescription);
            }
        }
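
For context, a hedged sketch of how the fields used above are typically initialized (Vision framework, iOS 11+; the _shapeLayer type is an assumption):

        readonly VNSequenceRequestHandler _sequenceRequestHandler = new VNSequenceRequestHandler();
        VNDetectFaceLandmarksRequest _detectFaceLandmarksRequest;  // created lazily in DetectFaceLandmarks
        readonly CALayer _shapeLayer = new CALayer();              // assumed host layer for the drawn landmarks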
Example #8
        /// <summary>
        /// Callback method for when a new image is received from the video.
        /// </summary>
        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            var imageBuffer   = sampleBuffer.GetImageBuffer();
            var ciImage       = new CIImage(imageBuffer);
            var cgImage       = this.context.CreateCGImage(ciImage, ciImage.Extent);
            var current_color = ColorProcessor.AverageColor(cgImage);


            if (musicProcessor == null)
            {
                Console.WriteLine("Music processor is null. Waiting for init.");
            }
            else if (!recording)
            {
                musicProcessor.PlayColor(current_color, previous_color);
            }

            previous_color = current_color;

            if (!recording)
            {
                // we intentionally do not await these methods since that would
                // block the rest of the app from working.
                // how long these methods take depends on the duration of
                // the API call to Azure.

                // we handle the busy part here since if we handled it in PlayObjects
                // it would overcrowd the threads for async calls.

                if (generalObjectRecognition && !objectsProcessor.busy)
                {
                    objectsProcessor.PlayObjects(cgImage);
                }

                if (customObjectRecognition && !customObjectsProcessor.busy)
                {
                    customObjectsProcessor.PlayObjects(cgImage);
                }
            }

            BeginInvokeOnMainThread(() =>
            {
                // we need to change the background color on the main thread,
                // since this is the UI thread
                background.BackgroundColor = current_color;
            });

            // we dispose the sample buffer since we are not able to process
            // every frame, and undisposed buffers would otherwise pile up
            sampleBuffer.Dispose();
        }
Example #9
 public static UIImage InvertColors(this UIImage img)
 {
     using (var coreImg = new CIImage(img.CGImage))
     {
         var filter = new CIColorInvert
         {
             Image = coreImg
         };
         var output  = filter.OutputImage;
         var ctx     = CIContext.FromOptions(null);
         var cgimage = ctx.CreateCGImage(output, output.Extent);
         return(UIImage.FromImage(cgimage));
     }
 }
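
A hypothetical call site for the extension above (the file name is illustrative):

     UIImage photo    = UIImage.FromFile("photo.png");
     UIImage inverted = photo.InvertColors();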
Example #10
 public static UIImage ToFilter(UIImage source, CIFilter filter)
 {
     using (var context = CIContext.FromOptions(new CIContextOptions {
         UseSoftwareRenderer = false
     }))
         using (var inputImage = CIImage.FromCGImage(source.CGImage))
         {
             filter.Image = inputImage;
             using (var resultImage = context.CreateCGImage(filter.OutputImage, inputImage.Extent))
             {
                 return(new UIImage(resultImage));
             }
         }
 }
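
A hypothetical call of the helper above, pushing a sepia filter through it (file name and intensity are illustrative):

     var sepia     = new CISepiaTone { Intensity = 0.8f };
     UIImage toned = ToFilter(UIImage.FromFile("photo.png"), sepia);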
Example #11
        public static void Colourize(NSView control, Color color, Action drawAction)
        {
            var size = control.Frame.Size;

            if (size.Width <= 0 || size.Height <= 0)
            {
                return;
            }
            var image = new NSImage(size);

            image.LockFocusFlipped(!control.IsFlipped);
            drawAction();
            image.UnlockFocus();

            var ciImage = CIImage.FromCGImage(image.CGImage);

            CGSize realSize;

            if (control.RespondsToSelector(selConvertSizeToBacking))
            {
                realSize = control.ConvertSizeToBacking(size);
            }
            else
            {
                realSize = control.ConvertSizeToBase(size);
            }

            var filter2 = new CIColorControls();

            filter2.SetDefaults();
            filter2.Image      = ciImage;
            filter2.Saturation = 0.0f;
            ciImage            = (CIImage)filter2.ValueForKey(CIOutputImage);

            var filter3 = new CIColorMatrix();

            filter3.SetDefaults();
            filter3.Image   = ciImage;
            filter3.RVector = new CIVector(0, color.R, 0);
            filter3.GVector = new CIVector(color.G, 0, 0);
            filter3.BVector = new CIVector(0, 0, color.B);
            ciImage         = (CIImage)filter3.ValueForKey(CIOutputImage);

            // create separate context so we can force using the software renderer, which is more than fast enough for this
            var ciContext = CIContext.FromContext(NSGraphicsContext.CurrentContext.GraphicsPort, new CIContextOptions {
                UseSoftwareRenderer = true
            });

            ciContext.DrawImage(ciImage, new CGRect(CGPoint.Empty, size), new CGRect(CGPoint.Empty, realSize));
        }
Example #12
        CIImage Apply(BlurFilter blurFilter, CIImage input, bool dirty)
        {
            if (blurFilter == null || !blurFilter.Active || !dirty)
            {
                return(null);
            }

            CIFilter filter = new CIGaussianBlur {
                Image  = input,
                Radius = blurFilter.BlurRadius * 50,
            };

            return(filter.OutputImage);
        }
Example #13
        CIImage Apply(ModifyFilter modifyFilter, CIImage input, bool dirty)
        {
            if (modifyFilter == null || !modifyFilter.Active || !dirty)
            {
                return(null);
            }

            CIFilter filter = new CISepiaTone {
                Image     = input,
                Intensity = modifyFilter.Intensity,
            };

            return(filter.OutputImage);
        }
Example #14
        /// <summary>
        /// Gets the image metadata for a photo-library asset.
        /// </summary>
        /// <param name="url">The asset URL.</param>
        /// <returns>The metadata dictionary, or null if the asset cannot be loaded.</returns>
        public static NSDictionary GetPhotoLibraryMetadata(NSUrl url)
        {
            NSDictionary meta  = null;
            var          image = PHAsset.FetchAssets(new NSUrl[] { url }, new PHFetchOptions()).firstObject as PHAsset;

            if (image == null)
            {
                return(null);
            }


            try
            {
                var imageManager   = PHImageManager.DefaultManager;
                var requestOptions = new PHImageRequestOptions
                {
                    Synchronous          = true,
                    NetworkAccessAllowed = true,
                    DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                };
                imageManager.RequestImageData(image, requestOptions, (data, dataUti, orientation, info) =>
                {
                    try
                    {
                        var fullimage = CIImage.FromData(data);
                        if (fullimage?.Properties != null)
                        {
                            meta = new NSMutableDictionary
                            {
                                [ImageIO.CGImageProperties.Orientation]    = NSNumber.FromNInt((int)(fullimage.Properties.Orientation ?? CIImageOrientation.TopLeft)),
                                [ImageIO.CGImageProperties.ExifDictionary] = fullimage.Properties.Exif?.Dictionary ?? new NSDictionary(),
                                [ImageIO.CGImageProperties.TIFFDictionary] = fullimage.Properties.Tiff?.Dictionary ?? new NSDictionary(),
                                [ImageIO.CGImageProperties.GPSDictionary]  = fullimage.Properties.Gps?.Dictionary ?? new NSDictionary(),
                                [ImageIO.CGImageProperties.IPTCDictionary] = fullimage.Properties.Iptc?.Dictionary ?? new NSDictionary(),
                                [ImageIO.CGImageProperties.JFIFDictionary] = fullimage.Properties.Jfif?.Dictionary ?? new NSDictionary()
                            };
                        }
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex);
                    }
                });
            }
            catch
            {
            }

            return(meta);
        }
Example #15
 public static NSImage ToBlurred(NSImage source, float rad)
 {
     using (var inputImage = CIImage.FromCGImage(source.CGImage))
         using (var filter = new CIGaussianBlur()
         {
             Image = inputImage, Radius = rad
         })
             using (var resultImage = new NSCIImageRep(filter.OutputImage))
             {
                 var nsImage = new NSImage(resultImage.Size);
                 nsImage.AddRepresentation(resultImage);
                 return(nsImage);
             }
 }
Example #16
        public void HighlightShadowAdjust()
        {
            string file = Path.Combine(NSBundle.MainBundle.ResourcePath, "basn3p08.png");

            using (var url = NSUrl.FromFilename(file))
                using (var input = CIImage.FromUrl(url))
                    using (var filter = new CIHighlightShadowAdjust()) {
                        filter.Image           = input;
                        filter.HighlightAmount = 0.75f;
                        filter.ShadowAmount    = 1.5f;
                        // https://bugzilla.xamarin.com/show_bug.cgi?id=15465
                        Assert.NotNull(filter.OutputImage, "OutputImage");
                    }
        }
Example #17
        public static NSData GetJpegRepresentation(this CIImage image, float compressionQuality)
        {
            if (ciContext == null)
            {
                var eaglContext = new EAGLContext(EAGLRenderingAPI.OpenGLES2);
                ciContext = CIContext.FromContext(eaglContext);
            }

            using (CGImage outputImageRef = ciContext.CreateCGImage(image, image.Extent)) {
                using (UIImage uiImage = UIImage.FromImage(outputImageRef, 1f, UIImageOrientation.Up)) {
                    return(uiImage.AsJPEG(compressionQuality));
                }
            }
        }
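
A hypothetical call site for the extension above (the quality value is illustrative):

        NSData jpegBytes = someCIImage.GetJpegRepresentation(0.9f);
        // persist or upload the bytes as needed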
Example #18
 public static NSImage ToSepia(NSImage source)
 {
     using (var inputImage = CIImage.FromCGImage(source.CGImage))
         using (var filter = new CISepiaTone()
         {
             Image = inputImage
         })
             using (var resultImage = new NSCIImageRep(filter.OutputImage))
             {
                 var nsImage = new NSImage(resultImage.Size);
                 nsImage.AddRepresentation(resultImage);
                 return(nsImage);
             }
 }
Example #19
        public static void DrawWithColorTransform(this NSView view, Color?color, Action drawDelegate)
        {
            if (color.HasValue)
            {
                var size = view.Frame.Size;
                if (size.Width <= 0 || size.Height <= 0)
                {
                    return;
                }

                // render view to image
                var image = new NSImage(size);
                image.LockFocusFlipped(!view.IsFlipped);
                drawDelegate();
                image.UnlockFocus();

                // create Core image for transformation
                var ciImage = CIImage.FromCGImage(image.CGImage);

                CGSize displaySize;
                                #pragma warning disable iOSAndMacApiUsageIssue
                if (view.RespondsToSelector(selConvertSizeToBacking))
                {
                    displaySize = view.ConvertSizeToBacking(size);
                }
                else
                {
                    displaySize = view.ConvertSizeToBase(size);
                }
                                #pragma warning restore iOSAndMacApiUsageIssue

                // apply color matrix
                var transformColor = new CIColorMatrix();
                transformColor.SetDefaults();
                transformColor.Image   = ciImage;
                transformColor.RVector = new CIVector(0, (float)color.Value.Red, 0);
                transformColor.GVector = new CIVector((float)color.Value.Green, 0, 0);
                transformColor.BVector = new CIVector(0, 0, (float)color.Value.Blue);
                using (var key = new NSString("outputImage"))
                    ciImage = (CIImage)transformColor.ValueForKey(key);

                var ciCtx = CIContext.FromContext(NSGraphicsContext.CurrentContext.GraphicsPort, null);
                ciCtx.DrawImage(ciImage, new CGRect(CGPoint.Empty, size), new CGRect(CGPoint.Empty, displaySize));
            }
            else
            {
                drawDelegate();
            }
        }
Example #20
        public static void Colourize(NSView control, Color color, Action drawAction)
        {
            var size  = control.Frame.Size;
            var image = new NSImage(size);

            image.LockFocusFlipped(control.IsFlipped);
            drawAction();
            image.UnlockFocus();

            var ciImage = CIImage.FromData(image.AsTiff());

            if (control.IsFlipped)
            {
                var realSize        = control.ConvertSizeToBase(size);
                var affineTransform = new NSAffineTransform();
                affineTransform.Translate(0, realSize.Height);
                affineTransform.Scale(1, -1);
                var filter1 = CIFilter.FromName("CIAffineTransform");
                filter1.SetValueForKey(ciImage, CIInputImage);
                filter1.SetValueForKey(affineTransform, CIInputTransform);
                ciImage = filter1.ValueForKey(CIOutputImage) as CIImage;
            }

            var filter2 = CIFilter.FromName("CIColorControls");

            filter2.SetDefaults();
            filter2.SetValueForKey(ciImage, CIInputImage);
            filter2.SetValueForKey(new NSNumber(0.0f), CIInputSaturation);
            ciImage = filter2.ValueForKey(CIOutputImage) as CIImage;

            var filter3 = CIFilter.FromName("CIColorMatrix");

            filter3.SetDefaults();
            filter3.SetValueForKey(ciImage, CIInputImage);
            filter3.SetValueForKey(new CIVector(0, color.R, 0), CIInputRVector);
            filter3.SetValueForKey(new CIVector(color.G, 0, 0), CIInputGVector);
            filter3.SetValueForKey(new CIVector(0, 0, color.B), CIInputBVector);
            ciImage = filter3.ValueForKey(CIOutputImage) as CIImage;

            image = new NSImage(size);
            var rep = NSCIImageRep.FromCIImage(ciImage);

            image.AddRepresentation(rep);
            image.Draw(SD.PointF.Empty, new SD.RectangleF(SD.PointF.Empty, size), NSCompositingOperation.SourceOver, 1);

            /* Use this when implemented in maccore:
             * ciImage.Draw (SD.PointF.Empty, new SD.RectangleF (SD.PointF.Empty, size), NSCompositingOperation.SourceOver, 1);
             */
        }
Example #21
 public static UIImage ToBlurred(UIImage source, float rad)
 {
     using (var context = CIContext.FromOptions(new CIContextOptions {
         UseSoftwareRenderer = false
     }))
         using (var inputImage = CIImage.FromCGImage(source.CGImage))
             using (var filter = new CIGaussianBlur()
             {
                 Image = inputImage, Radius = rad
             })
                 using (var resultImage = context.CreateCGImage(filter.OutputImage, inputImage.Extent))
                 {
                     return(new UIImage(resultImage));
                 }
 }
Example #22
 private static UIImage CreateBlurImage(UIImage image)
 {
     using (CIImage inputImage = new CIImage(image))
         using (CIGaussianBlur blur = new CIGaussianBlur())
             using (CIContext context = CIContext.FromOptions(new CIContextOptions {
                 UseSoftwareRenderer = false
             })) {
                 blur.Image  = inputImage;
                 blur.Radius = 3;
                 using (CIImage outputImage = blur.OutputImage)
                      using (CGImage cgImage = context.CreateCGImage(outputImage, new CGRect(new CGPoint(0, 0), image.Size))) {
                         return(UIImage.FromImage(cgImage));
                     }
             }
 }
Example #23
        public static UIImage CreateScreenshot(this ARSCNView view, UIDeviceOrientation interfaceOrientation)
        {
            if (view.Session.CurrentFrame == null)
            {
                return(null);
            }

            var ciImage = new CIImage(view.Session.CurrentFrame.CapturedImage);

            // TODO: check
            var keys    = new NSString[] { CIFilterInputKey.Scale, CIFilterInputKey.AspectRatio };
            var objects = new NSNumber[] { new NSNumber(0.5f), new NSNumber(1f) };

            var dictionary  = NSDictionary.FromObjectsAndKeys(objects, keys);
            var scaledImage = ciImage.CreateByFiltering("CILanczosScaleTransform", dictionary);

            var context = new CIContext(new CIContextOptions {
                UseSoftwareRenderer = false
            });
            var cgimage = context.CreateCGImage(scaledImage, scaledImage.Extent);

            if (cgimage != null)
            {
                var orientation = UIImageOrientation.Right;
                switch (interfaceOrientation)
                {
                case UIDeviceOrientation.Portrait:
                    orientation = UIImageOrientation.Right;
                    break;

                case UIDeviceOrientation.PortraitUpsideDown:
                    orientation = UIImageOrientation.Left;
                    break;

                case UIDeviceOrientation.LandscapeLeft:
                    orientation = UIImageOrientation.Up;
                    break;

                case UIDeviceOrientation.LandscapeRight:
                    orientation = UIImageOrientation.Down;
                    break;
                }

                return(new UIImage(cgimage, 1f, orientation));
            }

            return(null);
        }
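
A hypothetical call site from a view controller hosting the ARSCNView (the sceneView field is illustrative):

        UIImage screenshot = sceneView.CreateScreenshot(UIDevice.CurrentDevice.Orientation);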
Example #24
        CIImage WillDisplayImage(QTCaptureView view, CIImage image)
        {
            if (description == null)
            {
                return(image);
            }

            var selectedFilter = (NSString)description [filterNameKey];

            var filter = CIFilter.FromName(selectedFilter);

            filter.SetDefaults();
            filter.Image = image;

            return(filter.OutputImage);
        }
Example #25
        public static void ApplySepia(string imagePath, UIImageView imgview)
        {
            var uiimage = UIImage.FromFile (imagePath);
            var ciimage = new CIImage (uiimage);

            var sepia = new CISepiaTone();
            sepia.Image = ciimage;
            sepia.Intensity = 0.8f;
            var output = sepia.OutputImage;

            var context = CIContext.FromOptions(null);
            var cgimage = context.CreateCGImage (output, output.Extent);
            var ui = UIImage.FromImage (cgimage);

            imgview.Image = ui;
        }
Example #26
        public static UIImage Inverted(this UIImage image)
        {
            // Convert to Core Image
            var ciImage = new CIImage(image);

            if (ciImage == null)
            {
                return(null);
            }

            // Apply filter
            ciImage = ciImage.CreateByFiltering("CIColorInvert", new NSDictionary());

            // Return results
            return(new UIImage(ciImage));
        }
Example #27
        public static void ApplyGreyscale(string imagePath, UIImageView imgview)
        {
            var uiimage = UIImage.FromFile (imagePath);
            var ciimage = new CIImage (uiimage);

            var greyscale = new CIColorControls();
            greyscale.Image = ciimage;
            greyscale.Saturation = 0f;
            var output = greyscale.OutputImage;

            var context = CIContext.FromOptions(null);
            var cgimage = context.CreateCGImage (output, output.Extent);
            var ui = UIImage.FromImage (cgimage);

            imgview.Image = ui;
        }
Example #28
        /// <summary>
        /// Tint the icon with the specified colour
        /// </summary>
        /// <param name="colour">Colour to tint icon with</param>
        public void TintIcon(CIColor colour)
        {
            // Use CoreImage to tint the icon
            var statusImage = CIImage.FromUrl(NSUrl.FromFilename(NSBundle.MainBundle.PathForResource(
                                                                     Path.GetFileNameWithoutExtension(HighlightImagePath), Path.GetExtension(HighlightImagePath))));
            var tintImage = CIImage.ImageWithColor(colour);
            var filter    = CIFilter.FromName("CIMultiplyCompositing");

            filter.SetValueForKey(tintImage, (NSString)"inputImage");
            filter.SetValueForKey(statusImage, (NSString)"inputBackgroundImage");

            var processedImage = (CIImage)filter.ValueForKey((NSString)"outputImage");
            var outputImage    = processedImage.ToNSImage();

            _statusItemView.Image = outputImage;
        }
Example #29
        private void DrawFaces(CIImage image, CGRect cleanAperture)
        {
            if (image == null)
            {
                return;
            }

            var features = faceDetector.FeaturesInImage(image);

            if (features.Count() > 0)
            {
                IsFaceDetected = true;
            }

            DrawFaces(features, cleanAperture, UIDeviceOrientation.Portrait);
        }
Example #30
        /// <summary>
        /// Purchasable feature. This function can't be called until the
        /// user has purchased the Greyscale Filter in-app product.
        /// </summary>
        public static void ApplyGreyscale(string imagePath, UIImageView imgview)
        {
            var uiimage = UIImage.FromFile(imagePath);
            var ciimage = new CIImage(uiimage);

            var greyscale = new CIColorControls();

            greyscale.Image      = ciimage;
            greyscale.Saturation = 0f;
            var output = greyscale.OutputImage;

            var context = CIContext.FromOptions(null);
            var cgimage = context.CreateCGImage(output, output.Extent);
            var ui      = UIImage.FromImage(cgimage);

            imgview.Image = ui;
        }
Example #31
        public static UIImage ToUIImage(this CMSampleBuffer sampleBuffer)
        {
            UIImage image;

            using (CVPixelBuffer pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
            {
                // Lock the base address
                pixelBuffer.Lock(CVPixelBufferLock.ReadOnly);
                using (CIImage cIImage = new CIImage(pixelBuffer))
                {
                    image = new UIImage(cIImage);
                }
                pixelBuffer.Unlock(CVPixelBufferLock.ReadOnly);
            }
            //Debug.WriteLine(String.Format("({2}) Received NSImage: {0}x{1}", image.Size.Width, image.Size.Height, flag));
            return(image);
        }
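
A hedged sketch of using the extension above from a capture delegate (previewImageView is illustrative; the buffer is disposed as soon as the frame is extracted):

        public override void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            using (sampleBuffer)
            {
                UIImage frame = sampleBuffer.ToUIImage();
                BeginInvokeOnMainThread(() => previewImageView.Image = frame);
            }
        }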
Example #32
        /// <summary>
        /// Resizes an image while maintaining its aspect ratio
        /// </summary>
        /// <param name="imageSource"></param>
        /// <param name="scale"></param>
        /// <returns></returns>
        public static UIImage ResizeImageWithAspectRatio(this UIImage imageSource, float scale)
        {
            if (scale > 1.0f)
            {
                return(imageSource);
            }


            using (var c = CIContext.Create())
            {
                var sourceImage = CIImage.FromCGImage(imageSource.CGImage);
                var orientation = imageSource.Orientation;
                imageSource?.Dispose();

                CILanczosScaleTransform transform = null;

                /*if(UIDevice.CurrentDevice.CheckSystemVersion(13, 0))
                 * {
                 *      transform = new CILanczosScaleTransform
                 *      {
                 *              Scale = scale,
                 *              InputImage = sourceImage,
                 *              AspectRatio = 1.0f
                 *      };
                 * }
                 * else*/
                //{
                transform = new CILanczosScaleTransform
                {
                    Scale       = scale,
                    Image       = sourceImage,
                    AspectRatio = 1.0f
                };
                //}

                var output = transform.OutputImage;
                using (var cgi = c.CreateCGImage(output, output.Extent))
                {
                    transform?.Dispose();
                    output?.Dispose();
                    sourceImage?.Dispose();

                    return(UIImage.FromImage(cgi, 1.0f, orientation));
                }
            }
        }
Example #33
 protected override NSImage Transform(NSImage sourceBitmap, string path, Work.ImageSource source, bool isPlaceholder, string key)
 {
     return(Helpers.MainThreadDispatcher.PostForResult <NSImage>(() =>
     {
         using (var inputImage = CIImage.FromCGImage(sourceBitmap.CGImage))
             using (var filter = new CIPhotoEffectMono()
             {
                 Image = inputImage
             })
                 using (var resultImage = new NSCIImageRep(filter.OutputImage))
                 {
                     var nsImage = new NSImage(resultImage.Size);
                     nsImage.AddRepresentation(resultImage);
                     return nsImage;
                 }
     }));
 }
Example #34
        /// <summary>
        /// Purchasable feature. This function can't be called until the
        /// user has purchased the Sepia Filter in-app product.
        /// </summary>
        public static void ApplySepia(string imagePath, UIImageView imgview)
        {
            var uiimage = UIImage.FromFile(imagePath);
            var ciimage = new CIImage(uiimage);

            var sepia = new CISepiaTone();

            sepia.Image     = ciimage;
            sepia.Intensity = 0.8f;
            var output = sepia.OutputImage;

            var context = CIContext.FromOptions(null);
            var cgimage = context.CreateCGImage(output, output.Extent);
            var ui      = UIImage.FromImage(cgimage);

            imgview.Image = ui;
        }
Example #35
        public void End()
        {
            if (_image == null || _realSize == null)
            {
                return;
            }

            _image.UnlockFocus();

            var ciImage = CIImage.FromCGImage(_image.CGImage);


#pragma warning disable CS0618 // Image => InputImage in Xamarin.Mac 6.6
            var filter2 = new CIColorControls();
            filter2.SetDefaults();
            filter2.Image      = ciImage;
            filter2.Saturation = 0.0f;
            ciImage            = (CIImage)filter2.ValueForKey(CIOutputImage);

            var filter3 = new CIColorMatrix();
            filter3.SetDefaults();
            filter3.Image = ciImage;
#pragma warning restore CS0618

            var cgColor    = Color.ToCG();
            var components = cgColor.Components;
            if (components.Length >= 3)
            {
                filter3.RVector = new CIVector(0, components[0], 0);
                filter3.GVector = new CIVector(components[1], 0, 0);
                filter3.BVector = new CIVector(0, 0, components[2]);
                filter3.AVector = new CIVector(0, 0, 0, cgColor.Alpha);
            }
            ciImage = (CIImage)filter3.ValueForKey(CIOutputImage);

            // create separate context so we can force using the software renderer, which is more than fast enough for this
            var ciContext = CIContext.FromContext(NSGraphicsContext.CurrentContext.GraphicsPort, new CIContextOptions {
                UseSoftwareRenderer = true
            });
            ciContext.DrawImage(ciImage, new CGRect(CGPoint.Empty, _image.Size), new CGRect(CGPoint.Empty, _realSize.Value));

            ciImage.Dispose();
            ciContext.Dispose();
            filter2.Dispose();
            filter3.Dispose();
        }
Example #36
 void DarkenImage()
 {
     var img = image.Image;
     var ciImage = new CIImage(img);
     var hueAdjust = new CIHueAdjust();   // first filter
     hueAdjust.Image = ciImage;
     hueAdjust.Angle = 2.094f;
     var sepia = new CISepiaTone();       // second filter
     sepia.Image = hueAdjust.OutputImage; // output from last filter, input to this one
     sepia.Intensity = 0.3f;
     CIFilter color = new CIColorControls()
     { // third filter
         Saturation = 2,
         Brightness = 1,
         Contrast = 3,
         Image = sepia.OutputImage    // output from last filter, input to this one
     };
     var output = color.OutputImage;
     var context = CIContext.FromOptions(null);
     // ONLY when CreateCGImage is called do all the effects get rendered
     var cgimage = context.CreateCGImage(output, output.Extent);
     var ui = UIImage.FromImage(cgimage);
     image.Image = ui;
 }
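
The chain above renders lazily: each filter's OutputImage feeds the next filter's Image, and nothing executes until CreateCGImage is called. A minimal sketch of the same idea as a reusable helper, using the key-value API shown in Example #5 (the helper name is illustrative):

     static UIImage ApplyChain(UIImage source, params CIFilter[] filters)
     {
         CIImage current = new CIImage(source);
         foreach (var f in filters)
         {
             // feed the previous stage's output into this filter
             f.SetValueForKey(current, CIFilterInputKey.Image);
             current = (CIImage)f.ValueForKey(CIFilterOutputKey.Image);
         }
         using (var context = CIContext.FromOptions(null))
         using (var cgimage = context.CreateCGImage(current, current.Extent))
             return UIImage.FromImage(cgimage);
     }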
Example #37
 void ConfigureCIObjects()
 {
     baseCIImage = CIImage.FromCGImage (PhotoView.Image.CGImage);
 }
Example #38
		/// <summary>
		/// Gets called by the VideoFrameSamplerDelegate if a new image has been captured. Does the rectangle detection.
		/// </summary>
		/// <param name="sender">Sender.</param>
		/// <param name="e">Event arguments</param>
		void HandleImageCaptured(object sender, ImageCaptureEventArgs e)
		{
			// Detect the rectangles in the captured image.
			// Important: cast CGImage to CIImage. There is an implicit cast operator from CGImage to CIImage, but if we
			// pass the CGImage in to FeaturesInImage(), many (implicit) CIImage instances will be created because this
			// method is called very often. The garbage collector cannot keep up with that and we run out of memory.
			// By casting manually and using() the CIImage, it will be disposed immediately, freeing up memory.
			using (CIImage inputCIImage = (CIImage)e.Image)
			{
				// Let the detector do its work on the image.
				var rectangles = detector.FeaturesInImage (inputCIImage);

				// Find the biggest rectangle. Note: in my tests I have never seen that more than one rectangle would be detected, but better be prepared.
				nfloat maxWidth = 0f;
				nfloat maxHeight = 0f;
				CIRectangleFeature biggestRect = rectangles.Length > 0 ? (CIRectangleFeature)rectangles [0] : null;

				Console.WriteLine ("Found " + rectangles.Length + " rectangles.");

				foreach(CIRectangleFeature rect in rectangles)
				{
					Console.WriteLine ("Found rect: " + rect);
					nfloat minX = (nfloat)Math.Min (rect.TopLeft.X, rect.BottomLeft.X);
					nfloat minY = (nfloat)Math.Min (rect.TopLeft.Y, rect.TopRight.Y);
					nfloat maxX = (nfloat)Math.Max (rect.TopRight.X, rect.BottomRight.X);
					nfloat maxY = (nfloat)Math.Max (rect.BottomLeft.Y, rect.BottomRight.Y);

					if (maxX - minX > maxWidth && maxY - minY > maxHeight)
					{
						maxWidth = maxX - minX;
						maxHeight = maxY - minY;

						biggestRect = rect;
					}
				}

				if (biggestRect == null)
				{
					this.InvokeOnMainThread (() => {
						this.imageViewOverlay.Image = null;
						this.imageViewPerspective.Image = null;
					});
					return;
				}

				Console.WriteLine ("Highlighting: top left = " + biggestRect.TopLeft + "; top right = " + biggestRect.TopRight + "; bottom left = " + biggestRect.BottomLeft + "; bottom right = " + biggestRect.BottomRight);

				// We are not on the main thread here.
				this.InvokeOnMainThread (() => {

					// Adjust the overlay image to the corners of the detected rectangle with CIPerspectiveTransformWithExtent.
					using(var dict = new NSMutableDictionary ())
					{
						dict.Add (key: new NSString ("inputExtent"), value: new CIVector (inputCIImage.Extent));
						dict.Add (key: new NSString ("inputTopLeft"), value: new CIVector (biggestRect.TopLeft));
						dict.Add (key: new NSString ("inputTopRight"), value: new CIVector (biggestRect.TopRight));
						dict.Add (key: new NSString ("inputBottomLeft"), value: new CIVector (biggestRect.BottomLeft));
						dict.Add (key: new NSString ("inputBottomRight"), value: new CIVector (biggestRect.BottomRight)); 

						// Create a semi-transparent CIImage which will show the detected rectangle.
						using(var overlayCIImage = new CIImage (color: CIColor.FromRgba (red: 1.0f, green: 0f, blue: 0f, alpha: 0.5f))
							// Size it to the source image.
							.ImageByCroppingToRect (inputCIImage.Extent)
							// Apply perspective distortion to the overlay rectangle to map it to the current camera picture.
							.CreateByFiltering ("CIPerspectiveTransformWithExtent", dict)
							// Place overlay on the image.
							.CreateByCompositingOverImage (inputCIImage))
						{
							// Must convert the CIImage into a CGImage and from there into a UIImage. 
							// Could go directly from CIImage to UIImage but when assigning the result to a UIImageView, the ContentMode of
							// the image view will be ignored and no proper aspect scaling will take place.
							using(var ctx = CIContext.FromOptions(null))
							using(CGImage convertedCGImage = ctx.CreateCGImage(overlayCIImage,  overlayCIImage.Extent))
							// This crashes with Xamarin.iOS
							//using(UIImage convertedUIImage = UIImage.FromImage(convertedCGImage, 1f, UIApplication.SharedApplication.StatusBarOrientation == UIInterfaceOrientation.LandscapeLeft ? UIImageOrientation.DownMirrored : UIImageOrientation.UpMirrored))
							// This works.
							using(UIImage convertedUIImage = UIImage.FromImage(convertedCGImage))
							{
								// Show converted image in UI.
								this.imageViewOverlay.Image = convertedUIImage;
							}
						}
					}

					// Apply a perspective correction with CIPerspectiveCorrection to the detected rectangle and display in another UIImageView.
					using(var dict = new NSMutableDictionary ())
					{
						dict.Add (key: new NSString ("inputTopLeft"), value: new CIVector (biggestRect.TopLeft));
						dict.Add (key: new NSString ("inputTopRight"), value: new CIVector (biggestRect.TopRight));
						dict.Add (key: new NSString ("inputBottomLeft"), value: new CIVector (biggestRect.BottomLeft));
						dict.Add (key: new NSString ("inputBottomRight"), value: new CIVector (biggestRect.BottomRight)); 

						// Use again CIImage -> CGImage -> UIImage to prevent scaling issues (see above).
						using(var perspectiveCorrectedImage = inputCIImage.CreateByFiltering ("CIPerspectiveCorrection", dict))
						using(var ctx = CIContext.FromOptions(null))
						using(CGImage convertedCGImage = ctx.CreateCGImage(perspectiveCorrectedImage, perspectiveCorrectedImage.Extent))
						using(UIImage convertedUIImage = UIImage.FromImage(convertedCGImage))
						{
							this.imageViewPerspective.Image = convertedUIImage;
						}
					}
				});
			}
				
			Console.WriteLine ("---------------------");
		}
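
For context, a hedged sketch of how the `detector` field used above is typically created (the accuracy option is illustrative):

		CIDetector detector = CIDetector.CreateRectangleDetector(null,
			new CIDetectorOptions { Accuracy = FaceDetectorAccuracy.High });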
Example #39
        /// <summary>
        /// Detects faces in the given image.
        /// </summary>
        /// <returns>The detected faces.</returns>
        /// <param name="ciimage">Source image.</param>
        /// <param name="imageOrientation">Image orientation.</param>
        public Face[] DetectInImage(CIImage ciimage, CGImageOrientation imageOrientation = CGImageOrientation.Default)
        {
            var rect = ciimage.Extent();
            int imageHeight = (int)(rect.height * PreprocessImageScale);
            int imageWidth = (int)(rect.width * PreprocessImageScale);
            _finalScale = ProjectedScale / PreprocessImageScale;

            // create CIImage from bitmapdata
            //			var ciimage:CIImage = CIImage.fromBitmapData(image, preprocessImageScale); //TODO
            //			var ciimage = new CIImage(CGImage.FromTexture2D(image));

            // orientation settings
            _imageOpts[CIDetector.ImageOrientation] = (int)imageOrientation;

            // detect
            var features = _detector.FeaturesInImage(ciimage, _imageOpts);

            // go through features and transform coords
            var faces = new Face[features.Length];
            for (int i=0; i<features.Length; i++) {
                var feature = features[i] as CIFaceFeature;
                var face = new Face();

                face.bounds = _FixRect(feature.bounds, imageHeight, imageWidth, imageOrientation);

                if (feature.hasMouthPosition) {
                    face.hasMouthPosition = true;
                    face.mouthPosition = _FixPoint(feature.mouthPosition, imageHeight, imageWidth, imageOrientation);
                }

                if (feature.hasLeftEyePosition) {
                    face.hasLeftEyePosition = true;
                    face.leftEyePosition = _FixPoint(feature.leftEyePosition, imageHeight, imageWidth, imageOrientation);
                }

                if (feature.hasRightEyePosition) {
                    face.hasRightEyePosition = true;
                    face.rightEyePosition = _FixPoint(feature.rightEyePosition, imageHeight, imageWidth, imageOrientation);
                }

                if (feature.RespondsToSelector("trackingID")) {
                    if (feature.hasTrackingID) {
                        face.hasTrackingID = true;
                        face.trackingID = feature.trackingID;
                    }

                    if (feature.hasTrackingFrameCount) {
                        face.hasTrackingFrameCount = true;
                        face.trackingFrameCount = feature.trackingFrameCount;
                    }
                }

                faces[i] = face;
            }

            return faces;
        }
Example #40
 /// <summary>
 /// Sets the input image from a Texture2D object.
 /// </summary>
 /// <param name='input'>
 /// The input image.
 /// </param>
 public void SetInput(Texture2D input)
 {
     _image = CIImage.FromTexture2D(input);
 }
Example #41
 /// <summary>
 /// Sets the input image from a CIImage object.
 /// </summary>
 /// <param name='input'>
 /// The input image.
 /// </param>
 public void SetInput(CIImage input)
 {
     _image = input;
 }
Example #42
 public async void Sample3Png ()
 {
     await _api.Init ("eng");
     using (var stream = LoadSample ("sample3.png"))
     using (var image = new CIImage (NSData.FromStream (stream)))
     using (var blur = new CIGaussianBlur ())
     using (var context = CIContext.Create ()) {
         blur.SetDefaults ();
         blur.Image = image;
         blur.Radius = 0;
         using (var outputCiImage = context.CreateCGImage (blur.OutputImage, image.Extent))
         using (var newImage = new UIImage (outputCiImage)) {
             var result = await ((TesseractApi)_api).Recognise (newImage);
             Assert.IsTrue (result);
             Assert.AreEqual (
                 "the quick brown fox\njumps over the lazy dog-\n\nTHE QUICK BROlLIN FOX\nJUMPS OVER THE LAZY DOG.\n\n",
                 _api.Text);
         }
     }
 }
Example #43
	void Update() {
		if (CoreXT.IsDevice) {
			if (webCam.didUpdateThisFrame) {
				CGImageOrientation orientation = CGImageOrientation.RotatedLeft;
				switch (webCam.videoRotationAngle) {
				case 0:
					orientation = CGImageOrientation.Default;
					break;
				case 90:
					orientation = CGImageOrientation.RotatedLeft;
					break;
				case 180:
					orientation = CGImageOrientation.UpsideDown;
					break;
				case 270:
					orientation = CGImageOrientation.RotatedRight;
					break;
				}
				
				var ciimage = new CIImage(CGImage.FromWebCamTexture(webCam));
				faceDetector.ProjectedScale = Screen.width / webCam.width;
				faces = faceDetector.DetectInImage(ciimage, orientation);
//				foreach (var face in faces) {
//					Log("face: " + face.Bounds + ", " + face.HasMouthPosition + ", " + face.LeftEyePosition + ", " + face.RightEyePosition);
//				}
				
				if (faces.Length == 1) {
					var face = faces[0];
					if (face.Bounds.center.x < (Screen.width / 2))
						GameObject.Find("Main Camera").GetComponent<Main>().rotateArmToLeft();
					else
						GameObject.Find("Main Camera").GetComponent<Main>().rotateArmToRight();
					
					
					
//					var newX = (face.Bounds.center.x / Screen.width) * diffX + minX;
//					var newZ = (face.Bounds.center.y / Screen.height) * diffY + minY;
//					var newY = (face.Bounds.width / diffFaceWidth) * diffFaceWidth + maxFaceWidth;
//					GameObject.Find("Main Camera").camera.transform.position = new Vector3(newX, newY, newZ);
//					GameObject.Find("Main Camera").GetComponent<Main>().Log("face: " + face.Bounds.center.x + ", " + face.Bounds.center.y + ", " + face.Bounds.width + ", " + face.Bounds.height
//						+ "; " + newX + ", " + newY + ", " + newZ);
					
					
					
				}
			}
		}
	}
Example #44
		CIImage WillDisplayImage (QTCaptureView view, CIImage image)
		{
			if (description == null)
				return image;
			
			var selectedFilter = (NSString)description [filterNameKey];

			var filter = CIFilter.FromName (selectedFilter);
			filter.SetDefaults ();
			filter.Image = image;

			return filter.OutputImage;
		}
Example #45
 /// <summary>
 /// Detects faces in the given pixel data.
 /// </summary>
 /// <returns>The detected faces.</returns>
 /// <param name="data">Pixel data.</param>
 /// <param name="width">Width of image.</param>
 /// <param name="height">Height of image.</param>
 /// <param name="imageOrientation">Image orientation.</param>
 public Face[] DetectInPixels32(Color32[] data, int width, int height, CGImageOrientation imageOrientation = CGImageOrientation.Default)
 {
     var cgimage = CGImage.FromPixels32(data, width, height, preprocessImageScale);
     var ciimage = new CIImage(cgimage);
     return DetectInImage(ciimage, imageOrientation);
 }
Example #46
        /*
        public void DetectInPixels32Async(Action<Face[]> callback, Color32[] data, int width, int height, CGImageOrientation imageOrientation = CGImageOrientation.Default) {
            if (_opQueue == null)
                _opQueue = new NSOperationQueue();

            var cgimage = CGImage.FromPixels32(data, width, height, preprocessImageScale);
            data = null;

            _opQueue.AddOperation(delegate() {
                var ciimage = new CIImage(cgimage);
                cgimage = null;
                Face[] faces = DetectInImage(ciimage, imageOrientation);
                ciimage = null;

                CoreXT.RunOnMainThread(delegate() {
                    callback(faces);
                    faces = null;
                });
            });
        }
        */
        /// <summary>
        /// Detects faces in the given image.
        /// </summary>
        /// <returns>The detected faces.</returns>
        /// <param name="ciimage">Source image.</param>
        /// <param name="imageOrientation">Image orientation.</param>
        public Face[] DetectInImage(CIImage ciimage, CGImageOrientation imageOrientation = CGImageOrientation.Default)
        {
            var rect = ciimage.Extent();
            int imageHeight = (int)rect.height;
            int imageWidth = (int)rect.width;
            _finalScale = projectedScale / preprocessImageScale;

            // options
            _imageOpts[CIDetector.ImageOrientation] = (int)imageOrientation;
            _imageOpts[CIDetector.Smile] = detectSmiles;
            _imageOpts[CIDetector.EyeBlink] = detectBlinks;

            // detect
            var features = _detector.FeaturesInImage(ciimage, _imageOpts);

            // go through features and transform coords
            var faces = new Face[features.Length];
            for (int i=0; i<features.Length; i++) {
                var feature = features[i] as CIFaceFeature;
                var face = new Face();

                face.bounds = _FixRect(feature.bounds, imageHeight, imageWidth, imageOrientation);

                if (feature.hasMouthPosition) {
                    face.hasMouthPosition = true;
                    face.mouthPosition = _FixPoint(feature.mouthPosition, imageHeight, imageWidth, imageOrientation);
                }

                if (feature.hasLeftEyePosition) {
                    face.hasLeftEyePosition = true;
                    face.leftEyePosition = _FixPoint(feature.leftEyePosition, imageHeight, imageWidth, imageOrientation);
                }

                if (feature.hasRightEyePosition) {
                    face.hasRightEyePosition = true;
                    face.rightEyePosition = _FixPoint(feature.rightEyePosition, imageHeight, imageWidth, imageOrientation);
                }

                if (feature.RespondsToSelector("trackingID")) {
                    if (feature.hasTrackingID) {
                        face.hasTrackingID = true;
                        face.trackingID = feature.trackingID;
                    }

                    if (feature.hasTrackingFrameCount) {
                        face.hasTrackingFrameCount = true;
                        face.trackingFrameCount = feature.trackingFrameCount;
                    }
                }

                if (feature.RespondsToSelector("faceAngle")) {
                    if (feature.hasFaceAngle) {
                        face.hasFaceAngle = true;
                        face.faceAngle = feature.faceAngle;
                    }

                    face.hasSmile = feature.hasSmile;
                    face.leftEyeClosed = feature.leftEyeClosed;
                    face.rightEyeClosed = feature.rightEyeClosed;
                }

                faces[i] = face;
            }

            return faces;
        }
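A short usage sketch; the faceDetector instance and the ciimage input are assumptions, while the fields come from the Face values populated above:

    // Hypothetical caller: run detection and inspect the transformed results.
    Face[] faces = faceDetector.DetectInImage (ciimage);
    foreach (var face in faces) {
        Debug.Log ("face bounds: " + face.bounds);
        if (face.hasSmile)
            Debug.Log ("smile detected");
        if (face.leftEyeClosed || face.rightEyeClosed)
            Debug.Log ("blink detected");
    }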
Example #47
 /// <summary>
 /// Detects faces in a Texture2D.
 /// </summary>
 /// <returns>The detected faces.</returns>
 /// <param name="image">The texture to analyze.</param>
 /// <param name="imageOrientation">Image orientation.</param>
 public Face[] DetectInImage(Texture2D image, CGImageOrientation imageOrientation = CGImageOrientation.Default)
 {
     var cgimage = CGImage.FromTexture2D(image, preprocessImageScale);
     var ciimage = new CIImage(cgimage);
     return DetectInImage(ciimage, imageOrientation);
 }
        UIImage CropImage( UIImage sourceImage, CGRect cropDimension )
        {
            // step one, transform the crop region into image space.
            // (So pixelX is a pixel in the actual image, not the scaled screen)

            // convert our position on screen to where it should be in the image
            float pixelX = (float) (cropDimension.X * ScreenToImageScalar);
            float pixelY = (float) (cropDimension.Y * ScreenToImageScalar);

            // same for height, since the image was scaled down to fit the screen.
            float width = (float) cropDimension.Width * ScreenToImageScalar;
            float height = (float) cropDimension.Height * ScreenToImageScalar;


            // Now we're going to rotate the image to actually be "up" as the user
            // sees it. To do that, we simply rotate it according to the Apple documentation.
            float rotationDegrees = 0.0f;

            switch ( sourceImage.Orientation )
            {
                case UIImageOrientation.Up:
                {
                    // don't do anything. The image space and the user space are 1:1
                    break;
                }
                case UIImageOrientation.Left:
                {
                    // the image space is rotated 90 degrees from user space,
                    // so do a CCW 90 degree rotation
                    rotationDegrees = 90.0f;
                    break;
                }
                case UIImageOrientation.Right:
                {
                    // the image space is rotated -90 degrees from user space,
                    // so do a CW 90 degree rotation
                    rotationDegrees = -90.0f;
                    break;
                }
                case UIImageOrientation.Down:
                {
                    rotationDegrees = 180;
                    break;
                }
            }
            
            // Now get a transform so we can rotate the image to be oriented the same as when the user previewed it
            CGAffineTransform fullImageTransform = GetImageTransformAboutCenter( rotationDegrees, sourceImage.Size );

            // apply to the image
            CIImage ciCorrectedImage = new CIImage( sourceImage.CGImage );
            CIImage ciCorrectedRotatedImage = ciCorrectedImage.ImageByApplyingTransform( fullImageTransform );

            // create a context and render it back out to a CGImage.
            // (dispose the context and intermediate images; UIImage keeps its own reference)
            using ( CIContext ciContext = CIContext.FromOptions( null ) )
            using ( CGImage rotatedCGImage = ciContext.CreateCGImage( ciCorrectedRotatedImage, ciCorrectedRotatedImage.Extent ) )
            {
                // now the image is properly oriented, so we can crop it.
                CGRect cropRegion = new CGRect( pixelX, pixelY, width, height );
                using ( CGImage croppedImage = rotatedCGImage.WithImageInRect( cropRegion ) )
                {
                    return new UIImage( croppedImage );
                }
            }
        }
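GetImageTransformAboutCenter is referenced above but not included in the snippet. A minimal sketch, assuming the intent is a rotation about the image center (translate the center to the origin, rotate, translate back); this is a guess at the helper, not the original implementation:

    static CGAffineTransform GetImageTransformAboutCenter (float rotationDegrees, CGSize imageSize)
    {
        float radians = rotationDegrees * (float)Math.PI / 180.0f;

        // Translate the center to the origin, rotate, then translate back.
        var transform = CGAffineTransform.MakeTranslation (-imageSize.Width / 2, -imageSize.Height / 2);
        transform = CGAffineTransform.Multiply (transform, CGAffineTransform.MakeRotation (radians));
        transform = CGAffineTransform.Multiply (transform, CGAffineTransform.MakeTranslation (imageSize.Width / 2, imageSize.Height / 2));
        return transform;
    }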
		void UpdateImage (bool coalesce, bool animate)
		{
			if (!TryInit (animate))
				return;

			if (filtering) {
				needsFilter = true;
				return;
			}

			if (image == null)
				return;

			var blurFilter = (BlurFilter)GetFilter (BlurFilter.Key);
			var modifyFilter = (ModifyFilter)GetFilter (ModifyFilter.Key);
			bool dirty = blurFilter != null ? blurFilter.Dirty : false;
			dirty |= modifyFilter != null ? modifyFilter.Dirty : false;
			filtering = true;

			TryStartIndicatorForFilter ();

			Action runFilters = () => {
				var filterInput = new CIImage (image.CGImage);
				CIImage filteredCIImage = Apply(blurFilter, filterInput, dirty);

				filterInput = filteredCIImage ?? new CIImage (image.CGImage);
				filteredCIImage = Apply(modifyFilter, filterInput, dirty) ?? filteredCIImage;

				CGImage cgFilteredImage = null;
				if (filteredCIImage != null) {
					CIContext context = CIContext.FromOptions (new CIContextOptions{ UseSoftwareRenderer = false });
					cgFilteredImage = context.CreateCGImage (filteredCIImage, filteredCIImage.Extent);
				}

				if (coalesce)
					InvokeOnMainThread (() => Apply(cgFilteredImage, image, dirty));
				else
					Apply(cgFilteredImage, image, dirty);
			};

			if (coalesce)
				Task.Delay (250).ContinueWith (_ => runFilters());
			else
				runFilters ();

			// only clear Dirty on filters that actually exist (either may be null, as checked above)
			if (blurFilter != null)
				blurFilter.Dirty = false;
			if (modifyFilter != null)
				modifyFilter.Dirty = false;
		}
		void ApplyPhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// Load the full size image.
			var inputImage = new CIImage (input.FullSizeImageUrl);

			// Apply the filter.
			filter.Image = inputImage.CreateWithOrientation (input.FullSizeImageOrientation);
			var outputImage = filter.OutputImage;

			// Write the edited image as a JPEG.
			// TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
			NSError error;
			if (!ciContext.WriteJpegRepresentation (outputImage, output.RenderedContentUrl, inputImage.ColorSpace, new NSDictionary (), out error))
				throw new InvalidProgramException ($"can't apply filter to image: {error.LocalizedDescription}");

			completion ();
		}
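A hypothetical caller, sketching how ApplyPhotoFilter might be wired into a PHContentEditingController's FinishContentEditing; the contentEditingInput field and the choice of CISepiaTone are assumptions:

    public void FinishContentEditing (Action<PHContentEditingOutput> completionHandler)
    {
        // Render into a fresh output built from the stored editing input (assumed field).
        var output = new PHContentEditingOutput (contentEditingInput);
        var filter = CIFilter.FromName ("CISepiaTone");
        ApplyPhotoFilter (filter, contentEditingInput, output, () => completionHandler (output));
    }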
Example #51
 /// <summary>
 /// Sets the input image from a UIImage object.
 /// </summary>
 /// <param name='input'>
 /// The input image.
 /// </param>
 public void SetInput(UIImage input)
 {
     _image = new CIImage(input);
 }
		CIImage Apply(BlurFilter blurFilter, CIImage input, bool dirty)
		{
			if (blurFilter == null || !blurFilter.Active || !dirty)
				return null;

			CIFilter filter = new CIGaussianBlur {
				Image = input,
				Radius = blurFilter.BlurRadius * 50,
			};
			return filter.OutputImage;
		}
Example #53
 public async Task<bool> Recognise (CGImage image)
 {
     using (var ciImage = new CIImage (image)) {
         return await Recognise (ciImage);
     }
 }
Example #54
 public async Task<bool> Recognise (CGImage image)
 {
     CheckIfInitialized ();
     using (var ciImage = new CIImage (image)) {
         return await Recognise (ciImage);
     }
 }
Example #55
        public void SetEnhancedImage ()
        {
            using (CIImage ciImage = new CIImage (_parent.ImageView.Image)) {
                InvokeOnMainThread (() => {
                    using (var dict = new NSMutableDictionary ()) {
                        var topLeft = new CGPoint (ConvertScreenToImageCoords (_markers [0].Location));
                        var topRight = new CGPoint (ConvertScreenToImageCoords (_markers [1].Location));
                        var bottomRight = new CGPoint (ConvertScreenToImageCoords (_markers [2].Location));
                        var bottomLeft = new CGPoint (ConvertScreenToImageCoords (_markers [3].Location));

                        dict.Add (key: new NSString ("inputTopLeft"), value: new CIVector (topLeft));
                        dict.Add (key: new NSString ("inputTopRight"), value: new CIVector (topRight));
                        dict.Add (key: new NSString ("inputBottomRight"), value: new CIVector (bottomRight));
                        dict.Add (key: new NSString ("inputBottomLeft"), value: new CIVector (bottomLeft));

                        using (var perspectiveCorrectedImage = ciImage.CreateByFiltering ("CIPerspectiveCorrection", dict))
                        using (var ctx = CIContext.FromOptions (null))
                        using (CGImage convertedCGImage = ctx.CreateCGImage (perspectiveCorrectedImage, perspectiveCorrectedImage.Extent))
                        using (UIImage convertedUIImage = UIImage.FromImage (convertedCGImage)) {
                            NSData imageData = convertedUIImage.AsJPEG ();
                            _encodedImage = imageData.GetBase64EncodedData (NSDataBase64EncodingOptions.None).ToString ();
                            _parent.ShowViewController (new BPreviewController (encodedImage: _encodedImage), this);
                        }
                    }
                });
            }

            isCroppedImageDisplayed = false;
        }
Example #56
 /// <summary>
 /// Detects faces in a Texture2D.
 /// </summary>
 /// <returns>The detected faces.</returns>
 /// <param name="image">The texture to analyze.</param>
 /// <param name="imageOrientation">Image orientation.</param>
 public Face[] DetectInImage(Texture2D image, CGImageOrientation imageOrientation = CGImageOrientation.Default)
 {
     var cgimage = CGImage.FromTexture2D(image);
     var ciimage = new CIImage(cgimage);
     // cgimage.Release();
     return DetectInImage(ciimage, imageOrientation);
 }
Example #57
        public void UseDetector ()
        {
            var options = new CIDetectorOptions {
                Accuracy = FaceDetectorAccuracy.High,
                // 1.41 is roughly the aspect ratio of an A4 page, biasing the
                // detector toward document-shaped rectangles.
                AspectRatio = 1.41f
            };

            detector = CIDetector.CreateRectangleDetector (context: null, detectorOptions: options);

            using (CIImage ciImage = new CIImage (_parent.ImageView.Image)) {
                InvokeOnMainThread (() => {
                    var orient = GetExifOrientation (_parent.ImageView.Image);
                    var rectangles = detector.FeaturesInImage (ciImage, orient);
                    if (rectangles.Length > 0) {
                        _currRect = (CIRectangleFeature)rectangles [0];

                        _markers [0].Location = ConvertImageToScreenCoords (_currRect.TopLeft);
                        _markers [1].Location = ConvertImageToScreenCoords (_currRect.TopRight);
                        _markers [2].Location = ConvertImageToScreenCoords (_currRect.BottomRight);
                        _markers [3].Location = ConvertImageToScreenCoords (_currRect.BottomLeft);
                    }
                });
            }
        }
Example #58
        /// <summary>
        /// Renders the current image to a CGImage and clears the stored input.
        /// </summary>
        /// <returns>The rendered CGImage, or null if no input image is set.</returns>
        /// <param name="rect">The region to render.</param>
        public CGImage RenderToCGImage(Rect rect)
        {
            if (_image == null)
                return null;

            CGImage cgimage = _ciContext.CreateCGImage(_image, rect);
            _image = null;

            return cgimage;
        }
Example #59
        /// <summary>
        /// Applies auto-adjust filters to the image. This method analyzes the image, determines which
        /// filters it needs along with enhanced values for their parameters, and applies them.
        ///
        /// The imageOrientation argument is used to detect faces in the image so that some filters
        /// can be applied only to the faces. See the constants in FaceDetector for a list of values.
        /// </summary>
        /// <returns>
        /// The object itself, for chaining filters.
        /// </returns>
        /// <param name='imageOrientation'>
        /// The orientation of the image; default is FaceDetector.IMAGE_ORIENTATION_DEFAULT.
        /// </param>
        /// <param name='autoEnhance'>
        /// Whether to auto-enhance the image.
        /// </param>
        /// <param name='autoRedEye'>
        /// Whether to apply red-eye reduction.
        /// </param>
        /// <exception cref='U3DXTException'>
        /// Thrown when no input image has been set.
        /// </exception>
        public ImageFilter AutoAdjust(int imageOrientation = 1, bool autoEnhance = true, bool autoRedEye = true)
        {
            if (_image == null)
                throw new U3DXTException("Must call setInput() first.");

            Dictionary<object,object> options = new Dictionary<object,object>();

            options["CIDetectorImageOrientation"] = imageOrientation;
            options[CIImage.kCIImageAutoAdjustEnhance] = autoEnhance;
            options[CIImage.kCIImageAutoAdjustRedEye] = autoRedEye;

            Array adjustments = _image.AutoAdjustmentFilters(options);

            foreach (CIFilter aFilter in adjustments) {
                aFilter.SetValueForKey(_image, CIFilter.kCIInputImageKey);
                _image = aFilter.ValueForKey(CIFilter.kCIOutputImageKey) as CIImage;
            }

            return this;
        }
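Putting the ImageFilter pieces together (SetInput, AutoAdjust, RenderToCGImage), a hypothetical chained usage; sourceImage and the render rectangle are assumptions:

    // Sketch: feed a UIImage in, auto-adjust, then render the result back out.
    var imageFilter = new ImageFilter ();
    imageFilter.SetInput (sourceImage);
    imageFilter.AutoAdjust (autoEnhance: true, autoRedEye: true);
    CGImage result = imageFilter.RenderToCGImage (new Rect (0, 0, 1024, 768));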
		CIImage Apply(ModifyFilter modifyFilter, CIImage input, bool dirty)
		{
			if (modifyFilter == null || !modifyFilter.Active || !dirty)
				return null;

			CIFilter filter = new CISepiaTone {
				Image = input,
				Intensity = modifyFilter.Intensity,
			};
			return filter.OutputImage;
		}
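This sepia branch mirrors the CIGaussianBlur branch in the earlier Apply overload: UpdateImage chains the two by feeding the blur pass's output CIImage in as this filter's input, so both filters share the same null/Active/dirty short-circuit convention.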