// Re-runs the active Core Image filter and pushes the rendered result
// into FilterPreviewView, disposing the previous preview image first.
void UpdateFilterPreview()
{
    using (CIImage filtered = ciFilter.OutputImage)
    using (CGImage rendered = ciContext.CreateCGImage(filtered, filtered.Extent))
    {
        // Release the previously displayed image before swapping in the new one.
        TryDisposeFilterPreviewImg();
        FilterPreviewView.Image = UIImage.FromImage(rendered);
    }
}
/// <summary>
/// Renders the current output of <c>ciFilter</c> through <c>ciContext</c> and
/// displays it in <c>FilterPreviewView</c>.
/// </summary>
private void UpdateFilterPreview()
{
    // FIX: the original never disposed the CIImage returned by OutputImage,
    // leaking a native Core Image object on every call.
    using (CIImage outputImage = ciFilter.OutputImage)
    using (CGImage cgImage = ciContext.CreateCGImage(outputImage, outputImage.Extent))
    {
        // UIImage.FromImage retains the CGImage, so disposing it afterwards is safe.
        FilterPreviewView.Image = UIImage.FromImage(cgImage);
    }
}
/// <summary>
/// Applies a full-intensity sepia tone to the supplied image bytes and returns
/// the result redrawn at 400x400 points, encoded as PNG.
/// </summary>
/// <param name="imageIos">Source image bytes in any format UIImage can load.</param>
/// <returns>PNG-encoded bytes of the sepia-toned image.</returns>
public byte[] Sepia(byte[] imageIos)
{
    // FIX: removed the large commented-out Stream-based duplicate, the unused
    // ImageConv local, and added disposal of all native objects the original leaked.
    using (NSData imageData = NSData.FromArray(imageIos))
    using (UIImage original = UIImage.LoadFromData(imageData))
    using (var sepia = new CISepiaTone
    {
        Image = CIImage.FromCGImage(original.CGImage),
        Intensity = 1.0f
    })
    using (CIContext ctx = CIContext.FromOptions(null))
    using (CIImage output = sepia.OutputImage)
    using (CGImage cgImage = ctx.CreateCGImage(output, output.Extent))
    {
        // Redraw into a fixed 400x400 context — the size the image is displayed at.
        CGSize size = new CGSize(400, 400);
        UIGraphics.BeginImageContext(size);
        try
        {
            CGRect rect = new CGRect(CGPoint.Empty, size);
            UIImage.FromImage(cgImage).Draw(rect);
            using (UIImage image = UIGraphics.GetImageFromCurrentImageContext())
            using (NSData pngData = image.AsPNG())
            {
                return pngData.ToArray();
            }
        }
        finally
        {
            // Always balance BeginImageContext, even if drawing throws.
            UIGraphics.EndImageContext();
        }
    }
}
/// <summary>
/// Returns a Gaussian-blurred copy of <paramref name="image"/>. On any failure
/// the original image is returned unchanged; for null input, null is returned.
/// </summary>
/// <param name="image">Source image.</param>
/// <param name="radius">Blur radius in points.</param>
public static UIImage Blur(this UIImage image, float radius)
{
    // FIX: guard against null input; the original dereferenced image.CGImage
    // immediately and relied on the catch block to mask the NullReferenceException.
    if (image == null)
    {
        return null;
    }
    try
    {
        var imageToBlur = CIImage.FromCGImage(image.CGImage);
        // Clamp the image edges first so the blur does not feather into
        // transparency at the borders.
        var transform = new CIAffineClamp
        {
            Transform = CGAffineTransform.MakeIdentity(),
            Image = imageToBlur
        };
        var gaussianBlurFilter = new CIGaussianBlur
        {
            Image = transform.OutputImage,
            Radius = radius
        };
        if (context == null)
        {
            // Lazily create the shared CIContext; contexts are expensive to build.
            context = CIContext.FromOptions(null);
        }
        var resultImage = gaussianBlurFilter.OutputImage;
        // Crop back to the source size — the clamp extends the extent to infinity.
        var finalImage = UIImage.FromImage(
            context.CreateCGImage(resultImage, new RectangleF(PointF.Empty, image.Size)),
            1, UIImageOrientation.Up);
        return finalImage;
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
        return image;   // best-effort: fall back to the unblurred source
    }
}
// NOTE(review): appears to be scratch/demo code — the result `bt` is never
// used, and the input is an empty byte array, so UIImage.LoadFromData will
// presumably return null and .CGImage will throw — confirm whether this is
// ever called before keeping it. Also leaks every native object it creates.
void monovoid()
{
    // Empty input buffer — see note above; this cannot produce a valid image.
    byte[] ImageConv = new byte[0];
    var imageData = NSData.FromArray(ImageConv);
    CIImage flower = CIImage.FromCGImage(UIImage.LoadFromData(imageData).CGImage);
    // Sepia filter at 0.9 intensity over the loaded image.
    var sepia = new CISepiaTone() { Image = flower, Intensity = 0.9f };
    CIContext ctx = CIContext.FromOptions(null);
    var output = sepia.OutputImage;
    var cgImage = ctx.CreateCGImage(output, output.Extent);
    CGSize size = new CGSize(400, 400); //modify as necessary
    UIGraphics.BeginImageContext(size);
    CGRect rect = new CGRect(CGPoint.Empty, size);
    // Redraw the filtered image into the 400x400 context.
    UIImage.FromImage(cgImage).Draw(rect);
    UIImage image = UIGraphics.GetImageFromCurrentImageContext();
    //NSData jpegData = image.AsJPEG(1);
    NSData jpegData = image.AsPNG();
    UIGraphics.EndImageContext();
    // Result bytes are computed but discarded.
    byte[] bt = jpegData.ToArray();
}
CIColorControls colorCtrls; // CIFilter, cached and reused across calls

/// <summary>
/// Applies the current brightness/saturation/contrast slider values to
/// <paramref name="image"/> and returns the adjusted image.
/// </summary>
UIImage AdjustImage(UIImage image)
{
    // FIX: the original read "} ; else" — an empty statement between the if
    // block and the else clause, which does not compile.
    if (colorCtrls == null)
    {
        colorCtrls = new CIColorControls { Image = CIImage.FromCGImage(image.CGImage) };
    }
    else
    {
        colorCtrls.Image = CIImage.FromCGImage(image.CGImage);
    }
    if (context == null)
    {
        // Lazily create the shared CIContext; contexts are expensive to build.
        context = CIContext.FromOptions(null);
    }
    colorCtrls.Brightness = sliderB.Value;
    colorCtrls.Saturation = sliderS.Value;
    colorCtrls.Contrast = sliderC.Value;
    using (var outputImage = colorCtrls.OutputImage)
    {
        var result = context.CreateCGImage(outputImage, outputImage.Extent);
        return UIImage.FromImage(result);
    }
}
// Exercises every CustomerFilterType through MyCustomFilter and asserts that
// each custom CIKernel renders output and invokes its kernel callback.
public void CIKernel_BasicTest()
{
    // Custom CIKernel filters require iOS 8 / the corresponding SDK.
    if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
    {
        Assert.Inconclusive("Custom filters require iOS8+");
    }
    Exception ex = null;
    var t = new Thread(() =>
    {
        // This code will crash if an MKMapView has been created previously on
        // the same thread, so just run it on a different thread (MKMapViews can
        // only be created on the main thread). This is obviously an Apple bug,
        // and a radar has been filed: 19249153. ObjC test case: https://github.com/rolfbjarne/CIKernelMKMapViewCrash
        try
        {
            PlatformImage uiImg = new PlatformImage(NSBundle.MainBundle.PathForResource("Xam", "png", "CoreImage"));
#if MONOMAC
            // macOS: CIImage takes the CGImage directly and CIContext has a ctor.
            CIImage ciImg = new CIImage(uiImg.CGImage);
            CIContext context = new CIContext(null);
#else
            // iOS: CIImage wraps the UIImage; context comes from the factory.
            CIImage ciImg = new CIImage(uiImg);
            CIContext context = CIContext.FromOptions(null);
#endif
            foreach (CustomerFilterType type in Enum.GetValues(typeof(CustomerFilterType)))
            {
                MyCustomFilter filter = new MyCustomFilter(type);
                filter.MyImage = ciImg;
                CIImage outputImage = filter.OutputImage;
                // Forcing a CGImage render is what actually executes the kernel.
                CGImage cgImage = context.CreateCGImage(outputImage, outputImage.Extent);
#if MONOMAC
                NSImage finalImg = new NSImage(cgImage, new CGSize());
#else
                UIImage finalImg = new UIImage(cgImage);
#endif
                Assert.IsNotNull(finalImg, "CIKernel_BasicTest should not be null");
                Assert.IsTrue(filter.CallbackHit, "CIKernel_BasicTest callback must be hit");
                // Each filter type must report the expected kernel subclass.
                if (filter.IsColorKernel)
                {
                    Assert.IsTrue(filter.kernel is CIColorKernel, "CIKernel_BasicTest we disagree that it is a color kernel");
                }
                else
                {
                    Assert.IsTrue(filter.kernel is CIWarpKernel, "CIKernel_BasicTest we disagree that it is a warp kernel");
                }
            }
        }
        catch (Exception ex2)
        {
            // Captured here and rethrown on the test thread below so the
            // framework observes the failure.
            ex = ex2;
        }
    });
    t.Start();
    t.Join();
    if (ex != null)
    {
        throw ex;
    }
}
// Renders a CIImage through a freshly created context and wraps the
// resulting bitmap in a UIImage.
UIImage CreateImage(CIImage img)
{
    var renderContext = CreateContext();
    var rendered = renderContext.CreateCGImage(img, img.Extent);
    return new UIImage(rendered);
}
/// <summary>
/// Downscales <paramref name="imageSource"/> by <paramref name="scale"/> using
/// a Lanczos resampling filter, preserving the source orientation. Scales
/// above 1.0 are rejected and the source is returned unchanged.
/// </summary>
public static UIImage ResizeImageWithAspectRatio(this UIImage imageSource, float scale)
{
    // Upscaling is not supported; hand back the original untouched.
    if (scale > 1.0f)
    {
        return imageSource;
    }
    using (CIContext renderContext = CIContext.Create())
    {
        var lanczos = new CILanczosScaleTransform
        {
            Image = CIImage.FromCGImage(imageSource.CGImage),
            Scale = scale,
            AspectRatio = 1.0f   // keep the original aspect ratio
        };
        CIImage scaled = lanczos.OutputImage;
        CGImage rendered = renderContext.CreateCGImage(scaled, scaled.Extent);
        return UIImage.FromImage(rendered, 1.0f, imageSource.Orientation);
    }
}
CIColorControls colorCtrls; // CIFilter, cached and reused between slider events

/// <summary>
/// Slider change handler: re-applies brightness/saturation/contrast to the
/// low-res source image and shows the result in imageView.
/// </summary>
void HandleValueChanged(object sender, EventArgs e)
{
    // use the low-res version
    // FIX: the original read "} ; else" — an empty statement between the if
    // block and the else clause, which does not compile.
    if (colorCtrls == null)
    {
        colorCtrls = new CIColorControls { Image = CIImage.FromCGImage(sourceImage.CGImage) };
    }
    else
    {
        colorCtrls.Image = CIImage.FromCGImage(sourceImage.CGImage);
    }
    if (context == null)
    {
        // Lazily create the shared CIContext; contexts are expensive to build.
        context = CIContext.FromOptions(null);
    }
    colorCtrls.Brightness = sliderBrightness.Value;
    colorCtrls.Saturation = sliderSaturation.Value;
    colorCtrls.Contrast = sliderContrast.Value;
    using (var outputImage = colorCtrls.OutputImage)
    {
        var result = context.CreateCGImage(outputImage, outputImage.Extent);
        imageView.Image = UIImage.FromImage(result);
    }
}
/// <summary>
/// Renders a CIImage into a bitmap-backed UIImage.
/// </summary>
public static UIImage MakeUIImageFromCIImage(this CIImage ciImage)
{
    // FIX: the original created a CIContext per call and never disposed it,
    // leaking native resources. The CGImage is disposed as before — UIImage
    // retains it, so this is safe.
    using (CIContext context = CIContext.Create())
    using (CGImage cgImage = context.CreateCGImage(ciImage, ciImage.Extent))
    {
        return new UIImage(cgImage);
    }
}
// Converts a captured sample buffer into a UIImage, disposing every
// intermediate Core Image / Core Video object along the way.
private UIImage GetUIImage(CMSampleBuffer sampleBuffer)
{
    using (var pixelBuffer = sampleBuffer.GetImageBuffer())
    using (var sourceImage = new CIImage(pixelBuffer))
    using (var renderContext = new CIContext())
    using (var rendered = renderContext.CreateCGImage(sourceImage, sourceImage.Extent))
    {
        return new UIImage(rendered);
    }
}
/// <summary>
/// Encodes a CIImage as JPEG data at the given compression quality, rendering
/// through a lazily created, cached OpenGL ES 2 backed CIContext.
/// </summary>
public static NSData GetJpegRepresentation(this CIImage image, float compressionQuality)
{
    if (ciContext == null)
    {
        // Build the shared context once; GPU-backed rendering via OpenGL ES 2.
        var eaglContext = new EAGLContext(EAGLRenderingAPI.OpenGLES2);
        ciContext = CIContext.FromContext(eaglContext);
    }
    using (CGImage rendered = ciContext.CreateCGImage(image, image.Extent))
    using (UIImage wrapper = UIImage.FromImage(rendered, 1f, UIImageOrientation.Up))
    {
        return wrapper.AsJPEG(compressionQuality);
    }
}
/// <summary>
/// Renders a two-color checkerboard covering <paramref name="frame"/>.
/// </summary>
public static CGImage GenerateCheckerboard(CGRect frame, CIColor c0, CIColor c1)
{
    // NOTE(review): Width (cell size) is derived from frame.Height, capped at
    // 10 points — confirm this is intentional rather than a Height/Width mix-up.
    using (var generator = new CICheckerboardGenerator()
    {
        Color0 = c0,
        Color1 = c1,
        Width = (float)Math.Min(frame.Height / 2f, 10),
        Center = new CIVector(new nfloat[] { 0, 0 }),
    })
    using (var renderContext = new CIContext(null))
    {
        var bounds = new CGRect(0, 0, frame.Width, frame.Height);
        return renderContext.CreateCGImage(generator.OutputImage, bounds);
    }
}
/// <summary>
/// Generates a QR code image encoding <paramref name="message"/> (UTF-8).
/// </summary>
UIImage machineReadableCodeFromMessage(string message)
{
    var mrcFilter = CIFilter.FromName("CIQRCodeGenerator");
    NSData messageData = NSData.FromString(new NSString(message), NSStringEncoding.UTF8);
    mrcFilter.SetValueForKey(messageData, (NSString)"inputMessage");
    var barcodeCIImage = (CIImage)mrcFilter.ValueForKey((NSString)"outputImage");
    CGRect extent = barcodeCIImage.Extent;
    // FIX: CreateCGImage is an instance method — the original invoked it on the
    // CIContext type itself, which does not compile. Create (and dispose) a
    // context to do the rendering.
    using (CIContext renderContext = CIContext.FromOptions(null))
    {
        CGImage barcodeCGImage = renderContext.CreateCGImage(barcodeCIImage, extent);
        UIImage image = new UIImage(barcodeCGImage);
        return image;
    }
}
/// <summary>
/// Returns a copy of <paramref name="image"/> blurred with a 3-point Gaussian
/// blur, rendered on the GPU (software renderer disabled).
/// </summary>
private static UIImage CreateBlurImage(UIImage image)
{
    using (CIImage inputImage = new CIImage(image))
    using (CIGaussianBlur blur = new CIGaussianBlur())
    using (CIContext context = CIContext.FromOptions(new CIContextOptions { UseSoftwareRenderer = false }))
    {
        blur.Image = inputImage;
        blur.Radius = 3;
        using (CIImage outputImage = blur.OutputImage)
        // FIX: CreateCGImage returns a CGImage — the original declared this
        // local as CIImage, which does not compile.
        using (CGImage cgImage = context.CreateCGImage(outputImage, new CGRect(new CGPoint(0, 0), image.Size)))
        {
            return UIImage.FromImage(cgImage);
        }
    }
}
/// <summary>
/// Captures the current AR frame, downscales it 50% via a Lanczos filter, and
/// returns it as a UIImage rotated to match the device orientation. Returns
/// null when there is no current frame or rendering fails.
/// </summary>
public static UIImage CreateScreenshot(this ARSCNView view, UIDeviceOrientation interfaceOrientation)
{
    if (view.Session.CurrentFrame == null)
    {
        return null;
    }
    var ciImage = new CIImage(view.Session.CurrentFrame.CapturedImage);

    // TODO: check
    var keys = new NSString[] { CIFilterInputKey.Scale, CIFilterInputKey.AspectRatio };
    var objects = new NSNumber[] { new NSNumber(0.5f), new NSNumber(1f) };
    var dictionary = NSDictionary.FromObjectsAndKeys(objects, keys);
    var scaledImage = ciImage.CreateByFiltering("CILanczosScaleTransform", dictionary);

    // FIX: dispose the CIContext when rendering is done — the original leaked
    // one per screenshot.
    using (var context = new CIContext(new CIContextOptions { UseSoftwareRenderer = false }))
    {
        var cgimage = context.CreateCGImage(scaledImage, scaledImage.Extent);
        if (cgimage != null)
        {
            // Map the device orientation to the rotation that displays the
            // captured (sensor-oriented) image upright.
            var orientation = UIImageOrientation.Right;
            switch (interfaceOrientation)
            {
                case UIDeviceOrientation.Portrait:
                    orientation = UIImageOrientation.Right;
                    break;
                case UIDeviceOrientation.PortraitUpsideDown:
                    orientation = UIImageOrientation.Left;
                    break;
                case UIDeviceOrientation.LandscapeLeft:
                    orientation = UIImageOrientation.Up;
                    break;
                case UIDeviceOrientation.LandscapeRight:
                    orientation = UIImageOrientation.Down;
                    break;
            }
            return new UIImage(cgimage, 1f, orientation);
        }
        return null;
    }
}
/// <summary>
/// Re-renders the blurred "liquid" filter view: snapshots the content view,
/// runs it through the blur filter, and installs the result (with its rounded
/// mask) into filterView. No-op when animation is disallowed.
/// </summary>
private void RedrawFilterView()
{
    var isAnimationEnabled = IsAnimationAllowed();
    if (!isAnimationEnabled)
    {
        return;
    }
    var scale = UIScreen.MainScreen.Scale;
    // Larger phones (>= 414pt wide) use the bigger blur radius.
    var radius = (float)(UIScreen.MainScreen.Bounds.Width >= 414 ? kBlurRadiusIphonePlus : kBlurRadiusDefault);
    var bottomMargin = 10f;
    var offsetY = -contentView.Bounds.Height / 2;
    var bounds = new CGRect(valueView.Frame.X, offsetY, valueView.Frame.Size.Width, -offsetY + bottomMargin).Inset(-radius, 0);
    // Snapshot the content layer into a UIImage to feed the filter.
    var inputImage = new UIGraphicsImageRenderer(Bounds.Size).CreateImage(
        (obj) => contentView.Layer.RenderInContext(obj.CGContext)
    );
    filter.BlurRadius = radius;
    filter.Threshold = 0.49f;
    filter.BackgroundColor = contentViewColor;
    filter.AntialiasingRadius = (float)(scale / 2);
    filter.InputImage = new CIImage(inputImage.CGImage);
    var outputImage = filter.OutputImage?.ImageByCroppingToRect(
        new CGRect(0, 0, inputImage.Size.Width * scale, inputImage.Size.Height * scale));
    // FIX: filter.OutputImage is accessed null-conditionally above, but the
    // original then dereferenced outputImage.Extent unconditionally — a
    // guaranteed NullReferenceException whenever the filter produced no output.
    if (outputImage == null)
    {
        return;
    }
    var cgImage = context.CreateCGImage(outputImage, outputImage.Extent);
    filterView.Image = UIImage.FromImage(cgImage, scale, UIImageOrientation.Up);
    filterView.Frame = bounds;
    if (filterViewMask == null)
    {
        // Build the mask once: white fill with two cleared notches at the
        // bottom corners (radius wide, bottomMargin tall).
        var renderer = new UIGraphicsImageRenderer(new CGRect(new CGPoint(), Bounds.Size).Size);
        filterViewMask = renderer.CreateImage((ctx) =>
        {
            UIColor.White.SetFill();
            ctx.FillRect(new CGRect(new CGPoint(), bounds.Size));
            ctx.CGContext.ClearRect(new CGRect(0, bounds.Size.Height - bottomMargin, radius, bottomMargin));
            ctx.CGContext.ClearRect(new CGRect(bounds.Size.Width - radius, bounds.Size.Height - bottomMargin, radius, bottomMargin));
        });
        ((UIImageView)filterView.MaskView).Image = filterViewMask;
    }
}
/// <summary>
/// Returns a Gaussian-blurred copy of <paramref name="image"/>. Null input
/// yields null; any failure is reported and the source image is returned.
/// </summary>
public static UIImage Blur(this UIImage image, float radius)
{
    if (image == null)
    {
        return null;
    }
    try
    {
        var source = CIImage.FromCGImage(image.CGImage);
        if (source == null)
        {
            return image;
        }
        // Clamp the edges first so the blur does not feather into transparency.
        var clamp = new CIAffineClamp
        {
            Transform = CGAffineTransform.MakeIdentity(),
            Image = source
        };
        var blur = new CIGaussianBlur
        {
            Image = clamp.OutputImage,
            Radius = radius
        };
        if (context == null)
        {
            // Shared context, created on first use.
            context = CIContext.FromOptions(null);
        }
        var blurred = blur.OutputImage;
        // Crop back to the source bounds — the clamp extends the extent.
        var rendered = context.CreateCGImage(blurred, new CGRect(CGPoint.Empty, image.Size));
        return UIImage.FromImage(rendered, 1, UIImageOrientation.Up);
    }
    catch (Exception ex)
    {
        LogManager.Shared.Report(ex);
        return image;
    }
}
/// <summary>
/// Renders <paramref name="image"/> (e.g. a generated barcode) into a
/// grayscale bitmap scaled to fit <paramref name="size"/> using
/// nearest-neighbour scaling, so edges stay crisp instead of interpolated.
/// </summary>
UIImage CreateNonInterpolatedUIImageFormCIImage(CIImage image, nfloat size)
{
    CGRect extent = image.Extent;
    // Scale factor that fits the source extent within `size` on both axes.
    float scale = (float)Math.Min(size / extent.Width, size / extent.Height);
    // 创建bitmap (create the bitmap)
    int width = (int)(extent.Width * scale);
    int height = (int)(extent.Height * scale);
    // FIX: dispose the color space, bitmap context, CIContext, and rendered
    // CGImages — the original leaked all of them on every call.
    using (CGColorSpace colorSpace = CGColorSpace.CreateDeviceGray())
    using (CGBitmapContext bitmapRef = new CGBitmapContext(null, width, height, 8, 0, colorSpace, CGImageAlphaInfo.None))
    using (CIContext context = CIContext.FromOptions(null))
    using (CGImage bitmapImage = context.CreateCGImage(image, extent))
    {
        // Nearest-neighbour: deliberately no interpolation.
        bitmapRef.InterpolationQuality = CGInterpolationQuality.None;
        bitmapRef.ScaleCTM(scale, scale);
        bitmapRef.DrawImage(extent, bitmapImage);
        // 保存bitmap到图片 (snapshot the bitmap into an image)
        using (CGImage scaledImage = bitmapRef.ToImage())
        {
            return new UIImage(scaledImage);
        }
    }
}
/// <summary>
/// Re-runs the blur and modify filters over the source image and applies the
/// result. When <paramref name="coalesce"/> is true, the work is delayed 250ms
/// and the result is marshalled back to the main thread.
/// </summary>
void UpdateImage(bool coalesce, bool animate)
{
    if (!TryInit(animate))
    {
        return;
    }
    // A filter pass is already running; remember that another one is needed.
    if (filtering)
    {
        needsFilter = true;
        return;
    }
    if (image == null)
    {
        return;
    }
    var blurFilter = (BlurFilter)GetFilter(BlurFilter.Key);
    var modifyFilter = (ModifyFilter)GetFilter(ModifyFilter.Key);
    bool dirty = blurFilter != null ? blurFilter.Dirty : false;
    dirty |= modifyFilter != null ? modifyFilter.Dirty : false;
    filtering = true;
    TryStartIndicatorForFilter();
    Action runFilters = () =>
    {
        var filterInput = new CIImage(image.CGImage);
        // Blur first; fall back to the raw input if the blur produced nothing.
        CIImage filteredCIImage = Apply(blurFilter, filterInput, dirty);
        filterInput = filteredCIImage ?? new CIImage(image.CGImage);
        filteredCIImage = Apply(modifyFilter, filterInput, dirty) ?? filteredCIImage;
        CGImage cgFilteredImage = null;
        if (filteredCIImage != null)
        {
            CIContext context = CIContext.FromOptions(new CIContextOptions { UseSoftwareRenderer = false });
            cgFilteredImage = context.CreateCGImage(filteredCIImage, filteredCIImage.Extent);
        }
        if (coalesce)
        {
            InvokeOnMainThread(() => Apply(cgFilteredImage, image, dirty));
        }
        else
        {
            Apply(cgFilteredImage, image, dirty);
        }
    };
    if (coalesce)
    {
        Task.Delay(250).ContinueWith(_ => runFilters());
    }
    else
    {
        runFilters();
    }
    // FIX: the original did "blurFilter.Dirty = modifyFilter.Dirty = false;"
    // unconditionally, but both filters are treated as possibly null above —
    // that line would throw NullReferenceException whenever either is missing.
    if (blurFilter != null)
    {
        blurFilter.Dirty = false;
    }
    if (modifyFilter != null)
    {
        modifyFilter.Dirty = false;
    }
}
public void DrawImage(Image image, Rectangle destRect, float srcX, float srcY, float srcWidth, float srcHeight, GraphicsUnit srcUnit, ImageAttributes imageAttrs) { if (image == null) { throw new ArgumentNullException("image"); } var srcRect1 = new RectangleF(srcX, srcY, srcWidth, srcHeight); // If the source units are not the same we need to convert them // The reason we check for Pixel here is that our graphics already has the Pixel's baked into the model view transform if (srcUnit != graphicsUnit && srcUnit != GraphicsUnit.Pixel) { ConversionHelpers.GraphicsUnitConversion(srcUnit, graphicsUnit, image.HorizontalResolution, image.VerticalResolution, ref srcRect1); } if (image.NativeCGImage == null) { DrawImage(image, destRect); return; } // Obtain the subImage var subImage = image.NativeCGImage.WithImageInRect(srcRect1.ToCGRect()); // If we do not have anything to draw then we exit here if (subImage.Width == 0 || subImage.Height == 0) { return; } // var transform = image.imageTransform; //// // Reset our height on the transform to account for subImage // transform.y0 = subImage.Height; //// //// // Make sure we scale the image in case the source rectangle //// // overruns our subimage bouncs width and/or height // float scaleX = subImage.Width/srcRect1.Width; // float scaleY = subImage.Height/srcRect1.Height; // transform.Scale (scaleX, scaleY); bool attributesSet = imageAttrs != null && (imageAttrs.isColorMatrixSet || imageAttrs.isGammaSet); if (attributesSet) { InitializeImagingContext(); CIImage result = subImage; if (imageAttrs.isColorMatrixSet) { var ciFilter = CIFilter.FromName("CIColorMatrix"); ciFilter.SetDefaults(); ciFilter.SetValueForKey(result, new NSString("inputImage")); var inputRVector = new CIVector(imageAttrs.colorMatrix.Matrix00, imageAttrs.colorMatrix.Matrix01, imageAttrs.colorMatrix.Matrix02, imageAttrs.colorMatrix.Matrix03); var inputGVector = new CIVector(imageAttrs.colorMatrix.Matrix10, imageAttrs.colorMatrix.Matrix11, 
imageAttrs.colorMatrix.Matrix12, imageAttrs.colorMatrix.Matrix13); var inputBVector = new CIVector(imageAttrs.colorMatrix.Matrix20, imageAttrs.colorMatrix.Matrix21, imageAttrs.colorMatrix.Matrix22, imageAttrs.colorMatrix.Matrix23); var inputAVector = new CIVector(imageAttrs.colorMatrix.Matrix30, imageAttrs.colorMatrix.Matrix31, imageAttrs.colorMatrix.Matrix32, imageAttrs.colorMatrix.Matrix33); var inputBiasVector = new CIVector(imageAttrs.colorMatrix.Matrix40, imageAttrs.colorMatrix.Matrix41, imageAttrs.colorMatrix.Matrix42, imageAttrs.colorMatrix.Matrix43); ciFilter.SetValueForKey(inputRVector, new NSString("inputRVector")); ciFilter.SetValueForKey(inputGVector, new NSString("inputGVector")); ciFilter.SetValueForKey(inputBVector, new NSString("inputBVector")); ciFilter.SetValueForKey(inputAVector, new NSString("inputAVector")); ciFilter.SetValueForKey(inputBiasVector, new NSString("inputBiasVector")); result = (CIImage)ciFilter.ValueForKey(new NSString("outputImage")); } if (imageAttrs.isGammaSet) { var ciFilter = CIFilter.FromName("CIGammaAdjust"); ciFilter.SetDefaults(); ciFilter.SetValueForKey(result, new NSString("inputImage")); var inputPower = NSNumber.FromFloat(imageAttrs.gamma); ciFilter.SetValueForKey(inputPower, new NSString("inputPower")); result = (CIImage)ciFilter.ValueForKey(new NSString("outputImage")); } subImage = ciContext.CreateCGImage(result, result.Extent); } transform = image.imageTransform; transform.y0 = subImage.Height; float scaleX1 = subImage.Width / srcRect1.Width; float scaleY1 = subImage.Height / srcRect1.Height; transform.Scale(scaleX1, scaleY1); // Now draw our image DrawImage(destRect, subImage, transform); }
// Crops `sourceImage` to `cropDimension` (given in screen coordinates),
// first scaling the crop region into image space and rotating the image so
// it is oriented the way the user previewed it.
UIImage CropImage(UIImage sourceImage, CGRect cropDimension)
{
    // step one, transform the crop region into image space.
    // (So pixelX is a pixel in the actual image, not the scaled screen)

    // convert our position on screen to where it should be in the image
    float pixelX = (float)(cropDimension.X * ScreenToImageScalar);
    float pixelY = (float)(cropDimension.Y * ScreenToImageScalar);

    // same for height, since the image was scaled down to fit the screen.
    float width = (float)cropDimension.Width * ScreenToImageScalar;
    float height = (float)cropDimension.Height * ScreenToImageScalar;

    // Now we're going to rotate the image to actually be "up" as the user
    // sees it. To do that, we simply rotate it according to the apple documentation.
    float rotationDegrees = 0.0f;

    switch (sourceImage.Orientation)
    {
        case UIImageOrientation.Up:
        {
            // don't do anything. The image space and the user space are 1:1
            break;
        }
        case UIImageOrientation.Left:
        {
            // the image space is rotated 90 degrees from user space,
            // so do a CCW 90 degree rotation
            rotationDegrees = 90.0f;
            break;
        }
        case UIImageOrientation.Right:
        {
            // the image space is rotated -90 degrees from user space,
            // so do a CW 90 degree rotation
            rotationDegrees = -90.0f;
            break;
        }
        case UIImageOrientation.Down:
        {
            rotationDegrees = 180;
            break;
        }
    }

    // Now get a transform so we can rotate the image to be oriented the same as when the user previewed it
    CGAffineTransform fullImageTransform = GetImageTransformAboutCenter(rotationDegrees, sourceImage.Size);

    // apply to the image
    CIImage ciCorrectedImage = new CIImage(sourceImage.CGImage);
    CIImage ciCorrectedRotatedImage = ciCorrectedImage.ImageByApplyingTransform(fullImageTransform);

    // create a context and render it back out to a CGImage.
    CIContext ciContext = CIContext.FromOptions(null);
    CGImage rotatedCGImage = ciContext.CreateCGImage(ciCorrectedRotatedImage, ciCorrectedRotatedImage.Extent);

    // now the image is properly orientated, so we can crop it.
    // NOTE(review): the crop rect assumes ScreenToImageScalar maps screen
    // points into this rotated image's pixel space — confirm with callers.
    CGRect cropRegion = new CGRect(pixelX, pixelY, width, height);
    CGImage croppedImage = rotatedCGImage.WithImageInRect(cropRegion);
    return(new UIImage(croppedImage));
}
/// <summary>
/// Handles a camera capture: rotates the captured image upright based on its
/// orientation, writes it to <c>ImageDest</c> as a JPEG, and invokes
/// <c>CameraFinishedCallback</c> with the outcome.
/// </summary>
void CameraImageCaptured(object sender, UIImagePickerMediaPickedEventArgs e)
{
    bool result = false;
    string imagePath = null;

    // create a url of the path for the file to write
    NSUrl imageDestUrl = NSUrl.CreateFileUrl(new string[] { ImageDest });

    // create a CGImage destination that converts the image to jpeg
    CGImageDestination cgImageDest = CGImageDestination.Create(imageDestUrl, MobileCoreServices.UTType.JPEG, 1);
    if (cgImageDest != null)
    {
        // note: the edited image is saved "correctly", so we don't have to rotate.
        // rotate the image 0 degrees since we consider portrait to be the default position.
        CIImage ciImage = new CIImage(e.OriginalImage.CGImage);

        // Map the image orientation to the CCW rotation (degrees) needed to
        // make the saved image upright.
        float rotationDegrees = 0.00f;
        switch (e.OriginalImage.Orientation)
        {
            case UIImageOrientation.Up:
                // don't do anything. The image space and the user space are 1:1
                break;
            case UIImageOrientation.Left:
                // image space is rotated 90 degrees from user space: rotate CCW 90
                rotationDegrees = 90.0f;
                break;
            case UIImageOrientation.Right:
                // image space is rotated -90 degrees from user space: rotate CW 90
                rotationDegrees = -90.0f;
                break;
            case UIImageOrientation.Down:
                rotationDegrees = 180;
                break;
        }

        // create our transform and apply it to the image
        CGAffineTransform transform = CGAffineTransform.MakeIdentity();
        transform.Rotate(rotationDegrees * Rock.Mobile.Math.Util.DegToRad);
        CIImage rotatedImage = ciImage.ImageByApplyingTransform(transform);

        // create a context and render it back out to a CGImage. (Cast to ints so we account for any floating point error)
        CIContext ciContext = CIContext.FromOptions(null);
        CGImage rotatedCGImage = ciContext.CreateCGImage(rotatedImage,
            new System.Drawing.RectangleF((int)rotatedImage.Extent.X, (int)rotatedImage.Extent.Y,
                (int)rotatedImage.Extent.Width, (int)rotatedImage.Extent.Height));

        // put the image in the destination, converting it to jpeg.
        cgImageDest.AddImage(rotatedCGImage);

        // close and dispose.
        if (cgImageDest.Close())
        {
            result = true;
            imagePath = ImageDest;
        }
        // FIX: always dispose the destination — the original only disposed it
        // when Close() succeeded, leaking it on failure.
        cgImageDest.Dispose();
    }

    CameraFinishedCallback(result, imagePath);
}