// Populates the slide with three spinning banana models, each demonstrating a
// different Core Image filter attached to an SCNNode (blur, pixellate, edge work).
public override void DidOrderIn(PresentationViewController presentationViewController)
{
    // First banana: loaded from the scene assets, laid flat and spun forever.
    var banana = Utils.SCAddChildNode(ContentNode, "banana", "Scenes.scnassets/banana/banana", 5);
    banana.Rotation = new SCNVector4(1, 0, 0, -(float)(Math.PI / 2));
    banana.RunAction(SCNAction.RepeatActionForever(SCNAction.RotateBy(0, NMath.PI * 2, 0, 1.5f)));
    banana.Position = new SCNVector3(2.5f, 5, 10);
    var gaussianBlurFilter = new CIGaussianBlur() { Radius = 10 };
    gaussianBlurFilter.SetDefaults();
    banana.Filters = new CIFilter[] { gaussianBlurFilter };
    // Second banana: a copy of the first (keeps geometry and the rotate action),
    // repositioned and given a pixellate filter instead.
    banana = (SCNNode)banana.Copy();
    ContentNode.AddChildNode(banana);
    banana.Position = new SCNVector3(6, 5, 10);
    var pixellateFilter = new CIPixellate();
    pixellateFilter.SetDefaults();
    banana.Filters = new CIFilter[] { pixellateFilter };
    // Third banana: another copy, using the "CIEdgeWork" filter looked up by name
    // (no strongly-typed binding is used here).
    banana = (SCNNode)banana.Copy();
    ContentNode.AddChildNode(banana);
    banana.Position = new SCNVector3(9.5f, 5, 10);
    var filter = CIFilter.FromName("CIEdgeWork");
    filter.SetDefaults();
    banana.Filters = new CIFilter[] { filter };
}
// this test checks that all native filters have a managed peer, i.e. against missing filters
public void CheckNativeFilters()
{
    Errors = 0;
    var missing = new List<string> ();
    int scanned = 0;
    string qualifiedName = CIFilterType.AssemblyQualifiedName;
    // that will give us only the list of filters supported by the executing version of iOS
    foreach (var nativeName in CIFilter.FilterNamesInCategories(null)) {
        if (Skip(nativeName)) {
            continue;
        }
        // Derive the expected managed type name from the native filter name.
        string managedTypeName = qualifiedName.Replace("CIFilter", nativeName);
        if (Type.GetType(managedTypeName, false, true) == null) {
            missing.Add(nativeName);
            if (BindingOutput != null) {
                GenerateBinding(CIFilter.FromName(nativeName), BindingOutput);
            }
        }
        scanned++;
    }
    Assert.That(missing.Count, Is.EqualTo(0), "{0} native filters missing: {1}", missing.Count, String.Join(", ", missing));
}
// Applies `filter` to every frame of the video asset backing `input` and
// exports the result as a QuickTime movie to `output.RenderedContentUrl`,
// invoking `completion` when the asynchronous export finishes.
// Throws InvalidProgramException when the input has no audiovisual asset.
void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
    // Load AVAsset to process from input.
    var avAsset = input.AudiovisualAsset;
    if (avAsset == null) {
        throw new InvalidProgramException("can't get AV asset to edit");
    }

    // Set up a video composition to apply the filter.
    // The handler runs once per frame: feed the frame in, hand the filtered frame back.
    var composition = AVVideoComposition.CreateVideoComposition(avAsset, request => {
        filter.Image = request.SourceImage;
        var filtered = filter.OutputImage;
        request.Finish(filtered, null);
    });

    // Export the video composition to the output URL.
    // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
    var export = new AVAssetExportSession(avAsset, AVAssetExportSession.PresetHighestQuality) {
        OutputFileType = AVFileType.QuickTimeMovie,
        OutputUrl = output.RenderedContentUrl,
        VideoComposition = composition
    };
    // The session must stay alive until the export completes, so it is not disposed here.
    export.ExportAsynchronously(completion);
}
//
// Utility function used by pure-output generation filters
//
public CIImage Crop(CIFilter input)
{
    // Clamp the generator's (potentially infinite-extent) output to the window bounds.
    var cropFilter = new CICrop();
    cropFilter.Image = input.OutputImage;
    cropFilter.Rectangle = new CIVector(0, 0, window.Bounds.Width, window.Bounds.Height);
    return cropFilter.OutputImage;
}
// Configures the two tint colours of a CIFalseColor filter; any other filter
// is left untouched.
static void HandleFalseColorFilter(CIFilter filter)
{
    if (filter.Name != "CIFalseColor")
        return;

    filter[new NSString("inputColor0")] = new CIColor(UIColor.Red);
    filter[new NSString("inputColor1")] = new CIColor(UIColor.Blue);
}
// Builds the filter pipeline used for composition: a single Gaussian blur
// with a small fixed radius.
private CIFilter[] CompositionFilters()
{
    var blur = CIFilter.FromName("CIGaussianBlur");
    blur.SetDefaults();
    blur.SetValueForKey((NSNumber)2, (NSString)"inputRadius");
    return new CIFilter[] { blur };
}
// Builds a glow effect from the "inputImage" KVC value: tints it via a colour
// matrix, optionally enlarges it around (CenterX, CenterY), blurs it, then
// composites the original image over the blurred glow.
// Returns null when no input image has been set.
public CIImage OutputImage()
{
    var inputImage = ValueForKey(new NSString("inputImage"));
    if (inputImage == null) {
        return null;
    }

    // Monochrome: zero out red, push green/blue toward blue, keep alpha.
    var monochromeFilter = CIFilter.FromName("CIColorMatrix");
    monochromeFilter.SetDefaults();
    monochromeFilter.SetValueForKey(CIVector.Create(0, 0, 0), new NSString("inputRVector"));
    monochromeFilter.SetValueForKey(CIVector.Create(0, 0, 0.4f), new NSString("inputGVector"));
    monochromeFilter.SetValueForKey(CIVector.Create(0, 0, 1), new NSString("inputBVector"));
    monochromeFilter.SetValueForKey(CIVector.Create(0, 0, 1), new NSString("inputAVector"));
    monochromeFilter.SetValueForKey(inputImage, new NSString("inputImage"));
    var glowImage = (CIImage)monochromeFilter.ValueForKey(new NSString("outputImage"));

    // Scale: enlarge the glow by 20% around the configured centre point.
    var centerX = CenterX;
    var centerY = CenterY;
    if (centerX > 0) {
        var transform = new NSAffineTransform();
        transform.Translate(centerX, centerY);
        transform.Scale(1.2f);
        transform.Translate(-centerX, -centerY);
        var affineTransformFilter = CIFilter.FromName("CIAffineTransform");
        affineTransformFilter.SetDefaults();
        affineTransformFilter.SetValueForKey(transform, new NSString("inputTransform"));
        affineTransformFilter.SetValueForKey(glowImage, new NSString("inputImage"));
        glowImage = (CIImage)affineTransformFilter.ValueForKey(new NSString("outputImage"));
    }

    // Blur: soften the glow; InputRadius overrides the default radius of 10.
    var gaussianBlurFilter = CIFilter.FromName("CIGaussianBlur");
    gaussianBlurFilter.SetDefaults();
    gaussianBlurFilter.SetValueForKey(glowImage, new NSString("inputImage"));
    gaussianBlurFilter.SetValueForKey(InputRadius != null ? InputRadius : new NSNumber(10.0f), new NSString("inputRadius"));
    glowImage = (CIImage)gaussianBlurFilter.ValueForKey(new NSString("outputImage"));

    // Blend: composite the original image over the blurred glow.
    var blendFilter = CIFilter.FromName("CISourceOverCompositing");
    blendFilter.SetDefaults();
    blendFilter.SetValueForKey(glowImage, new NSString("inputBackgroundImage"));
    // BUGFIX: the original code passed `blendFilter` itself as "inputImage",
    // which dropped the source image from the composite entirely. The
    // original input image must be composited over the glow background.
    blendFilter.SetValueForKey(inputImage, new NSString("inputImage"));
    glowImage = (CIImage)blendFilter.ValueForKey(new NSString("outputImage"));
    return glowImage;
}
// Renders the filter's output image into the given image view.
// Does nothing when the filter produces no output (e.g. missing inputs).
static void DisplayFilterOutput(CIFilter filter, UIImageView imageView)
{
    CIImage output = filter.OutputImage;
    if (output == null)
        return;
    // FIX: dispose the CIContext and the managed CGImage wrapper, which the
    // original leaked; UIImage retains its own native reference to the
    // rendered image, so disposing the wrapper afterwards is safe.
    using (var context = CIContext.FromOptions(null))
    using (var renderedImage = context.CreateCGImage(output, output.Extent)) {
        imageView.Image = new UIImage(renderedImage);
    }
}
// Releases the current filter's native resources (if any) and clears the field
// so a new filter can be installed.
void TryDisposeFilter()
{
    if (ciFilter != null) {
        ciFilter.Dispose();
        ciFilter = null;
    }
}
// On load, construct the CIRawFilter
// Requests the original image data for the current Asset, builds a CIRawFilter
// from it, records the filter's native size / neutral temperature / neutral tint,
// and prepares an OpenGL ES CIContext for on-screen rendering.
public override void ViewDidLoad ()
{
    base.ViewDidLoad ();

    var asset = Asset;
    if (asset == null)
        return;

    // Setup options to request original image.
    var options = new PHImageRequestOptions {
        Version = PHImageRequestOptionsVersion.Original,
        Synchronous = true
    };

    // Request the image data and UTI type for the image.
    PHImageManager.DefaultManager.RequestImageData (asset, options, (imageData, dataUTI, _, __) => {
        if (imageData == null || dataUTI == null)
            return;

        // Create a CIRawFilter from original image data.
        // UTI type is passed in to provide the CIRawFilter with a hint about the UTI type of the Raw file.
        //var rawOptions = [String (kCGImageSourceTypeIdentifierHint) : dataUTI ]
        // The hint key constant is not bound, so resolve it from the native
        // ImageIO framework at runtime and set it through the low-level API.
        var rawOptions = new NSMutableDictionary ();
        var imageIOLibrary = Dlfcn.dlopen ("/System/Library/Frameworks/ImageIO.framework/ImageIO", 0);
        var key = Dlfcn.GetIntPtr (imageIOLibrary, "kCGImageSourceTypeIdentifierHint");
        rawOptions.LowlevelSetObject (dataUTI, key);

        ciRawFilter = CIFilter.CreateRawFilter (imageData, rawOptions);
        if (ciRawFilter == null)
            return;

        // Get the native size of the image produced by the CIRawFilter.
        var sizeValue = ciRawFilter.ValueForKey (Keys.kCIOutputNativeSizeKey) as CIVector;
        if (sizeValue != null)
            imageNativeSize = new CGSize (sizeValue.X, sizeValue.Y);

        // Record the original value of the temperature, and setup the editing slider.
        var tempValue = (NSNumber)ciRawFilter.ValueForKey (Keys.kCIInputNeutralTemperatureKey);
        if (tempValue != null) {
            originalTemp = tempValue.FloatValue;
            TempSlider.SetValue (tempValue.FloatValue, animated: false);
        }

        // Record the original value of the tint, and setup the editing slider.
        var tintValue = (NSNumber)ciRawFilter.ValueForKey (Keys.kCIInputNeutralTintKey);
        if (tintValue != null) {
            originalTint = tintValue.FloatValue;
            TintSlider.SetValue (tintValue.FloatValue, animated: false);
        }
    });

    // Create EAGL context used to render the CIImage produced by the CIRawFilter to display.
    ImageView.Context = new EAGLContext (EAGLRenderingAPI.OpenGLES3);
    // RGBAh (half-float) preserves the RAW image's extended dynamic range.
    ciContext = CIContext.FromContext (ImageView.Context, new CIContextOptions { CIImageFormat = CIImage.FormatRGBAh });
}
// Rebuilds `ciFilter` from the selected filter name and feeds it the
// orientation-corrected input image.
private void UpdateFilter()
{
    ciFilter = CIFilter.FromName(selectedFilterName);
    // Wrap the UIImage's backing CGImage and bake its orientation in.
    var source = CIImage.FromCGImage(this.inputImage.CGImage);
    source = source.CreateWithOrientation(Convert(this.inputImage.Orientation));
    ciFilter.Image = source;
}
// Installs a pointillize effect, centred on the view, as the content filter.
private void Pointalize()
{
    var midpoint = CIVector.Create(Bounds.GetMidX(), Bounds.GetMidY());
    var pointillize = CIFilter.FromName("CIPointillize");
    pointillize.SetValueForKey(NSNumber.FromFloat(1), CIFilterInputKey.Radius);
    pointillize.SetValueForKey(midpoint, CIFilterInputKey.Center);
    controls.ContentFilters = new CIFilter[] { pointillize };
}
// this test checks that all managed filters have a native peer, i.e. against extra filters
// Strategy: start from the full native list and remove one entry per managed
// CIFilter subclass found by reflection; anything left over is a managed
// filter without a native counterpart (after Skip() exclusions).
public void CheckManagedFilters()
{
    List<string> filters = new List<string> (CIFilter.FilterNamesInCategories(null));
    var nspace = CIFilterType.Namespace;
    var types = CIFilterType.Assembly.GetTypes();
    foreach (Type t in types) {
        if (t.Namespace != nspace) {
            continue;
        }
        if (t.IsAbstract || !CIFilterType.IsAssignableFrom(t)) {
            continue;
        }
        // we need to skip the filters that are not supported by the executing version of iOS
        if (Skip(t)) {
            continue;
        }
        // Only consider filters with a usable default constructor.
        var ctor = t.GetConstructor(Type.EmptyTypes);
        if ((ctor == null) || ctor.IsAbstract) {
            continue;
        }
        NSObject obj = ctor.Invoke(null) as NSObject;
        // A zero handle means the native peer failed to instantiate.
        Assert.That(obj.Handle, Is.Not.EqualTo(IntPtr.Zero), t.Name + ".Handle");
#if false
        // check base type - we might have our own base type or different names, so it's debug only (not failure)
        var super = new Class(obj.Class.SuperClass).Name;
        var bt = t.BaseType.Name;
        if ((super != bt) && (bt == "CIFilter")) // check if we should (like Apple) use a non-default base type for filters
        {
            Console.WriteLine("[WARN] {0}.SuperClass == {1} (native) and {2} managed", t.Name, super, bt);
        }
#endif
        // Remove this managed filter's native entry; case-insensitive because
        // managed naming conventions can alter acronym casing.
        int result = filters.RemoveAll(s => StringComparer.OrdinalIgnoreCase.Compare(t.Name, s) == 0);
        Assert.That(result, Is.GreaterThan(0), t.Name);
    }
    // in case it's a buggy filter we need to try to remove it from the list too
    for (int i = filters.Count - 1; i >= 0; i--) {
        if (Skip(filters [i])) {
            filters.RemoveAt(i);
        }
    }
    Assert.That(filters.Count, Is.EqualTo(0), "Managed filters not found for {0}", String.Join(", ", filters));
}
// Enables Core Image layer filtering on the attached NSView and prepares a
// Gaussian blur filter with default values. Non-NSView containers are ignored.
protected override void OnAttached()
{
    // Pattern match replaces the original `is` check + redundant `as` cast.
    if (Container is NSView control) {
        control.WantsLayer = true;
        control.LayerUsesCoreImageFilters = true;
        filter = CIFilter.FromName("CIGaussianBlur");
        filter.SetDefaults();
    }
}
// Generates a QR code CIImage encoding `qrString` (UTF-8) at the requested
// error-correction level. The enum member's name is passed straight through
// as the native level string — assumes QRCodeCorrectionLevel members are
// named after the native levels (L/M/Q/H); TODO confirm against the enum.
CIImage CreateQRForString(NSString qrString, QRCodeCorrectionLevel level)
{
    NSData strData = qrString.Encode(NSStringEncoding.UTF8);
    // Create the generator filter
    CIFilter qrFilter = CIFilter.FromName("CIQRCodeGenerator");
    // Set the payload and the error-correction level
    qrFilter.SetValueForKey(strData, new NSString("inputMessage"));
    qrFilter.SetValueForKey(new NSString(level.ToString()), new NSString("inputCorrectionLevel"));
    // Return the generated CIImage
    return(qrFilter.OutputImage);
}
// Applies `filter` to `source` and returns the rendered result as a new UIImage.
// Uses a hardware-accelerated CIContext; all intermediates are disposed.
public static UIImage ToFilter(UIImage source, CIFilter filter)
{
    var options = new CIContextOptions { UseSoftwareRenderer = false };
    using (var context = CIContext.FromOptions(options))
    using (var input = CIImage.FromCGImage(source.CGImage)) {
        filter.Image = input;
        using (var rendered = context.CreateCGImage(filter.OutputImage, input.Extent)) {
            return new UIImage(rendered);
        }
    }
}
// Renders `effect`'s output into a UIImage that matches the original image's
// scale and orientation.
UIImage ApplyEffect(CIFilter effect, UIImage originalImage)
{
    var holder = effect.OutputImage;
    CGRect extent = holder.Extent;
    // FIX: the original never disposed the context or the CGImage wrapper
    // (a dispose call sat unreachable after the return). UIImage retains its
    // own native reference, so disposing the wrapper here is safe.
    using (var context = CIContext.FromOptions(null))
    using (var cgImage = context.CreateCGImage(holder, extent)) {
        return UIImage.FromImage(cgImage, originalImage.CurrentScale, originalImage.Orientation);
    }
}
// Requests editable content for the current Asset, applies `filter` to the
// full-size image, and commits the rendered JPEG back to the photo library
// together with adjustment data describing the edit.
void ApplyFilter(CIFilter filter)
{
    // Prepare the options to pass when requesting to edit the image.
    var options = new PHContentEditingInputRequestOptions();
    // Only resume prior edits this app recorded (same format identifier/version).
    options.SetCanHandleAdjustmentDataHandler(adjustmentData => {
        bool result = false;
        InvokeOnMainThread(() => {
            result = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0";
        });
        return(result);
    });

    Asset.RequestContentEditingInput(options, (contentEditingInput, requestStatusInfo) => {
        // Create a CIImage from the full image representation.
        var url = contentEditingInput.FullSizeImageUrl;
        int orientation = (int)contentEditingInput.FullSizeImageOrientation;
        var inputImage = CIImage.FromUrl(url);
        inputImage = inputImage.CreateWithOrientation((CIImageOrientation)orientation);

        // Create the filter to apply.
        filter.SetDefaults();
        filter.Image = inputImage;

        // Apply the filter.
        CIImage outputImage = filter.OutputImage;

        // Create a PHAdjustmentData object that describes the filter that was applied.
        var adjustmentData = new PHAdjustmentData(
            AdjustmentFormatIdentifier,
            "1.0",
            NSData.FromString(filter.Name, NSStringEncoding.UTF8));

        var contentEditingOutput = new PHContentEditingOutput(contentEditingInput);
        NSData jpegData = outputImage.GetJpegRepresentation(0.9f);
        jpegData.Save(contentEditingOutput.RenderedContentUrl, true);
        contentEditingOutput.AdjustmentData = adjustmentData;

        // Ask the shared PHPhotoLibrary to perform the changes.
        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
            var request = PHAssetChangeRequest.ChangeRequest(Asset);
            request.ContentEditingOutput = contentEditingOutput;
        }, (success, error) => {
            if (!success) {
                Console.WriteLine("Error: {0}", error.LocalizedDescription);
            }
        });
    });
}
// Rebuilds `ciFilter` from the currently selected filter name, feeding it the
// orientation-corrected input image. The previous filter and its input are
// disposed first so their native resources are released promptly.
void UpdateFilter()
{
    TryDisposeFilterInput();
    TryDisposeFilter();
    ciFilter = CIFilter.FromName(selectedFilterName);
    CIImageOrientation orientation = Convert(inputImage.Orientation);
    using (CGImage cgImage = inputImage.CGImage) {
        // CreateWithOrientation produces a new CIImage, so the intermediate
        // wrapper can be disposed as soon as the assignment completes.
        using (CIImage ciInputImage = CIImage.FromCGImage(cgImage))
            ciFilter.Image = ciInputImage.CreateWithOrientation(orientation);
    }
}
// Applies `filter` to `source` and returns the result as a new NSImage built
// from an NSCIImageRep of the filter output.
public static NSImage ToFilter(NSImage source, CIFilter filter)
{
    using (var input = CIImage.FromCGImage(source.CGImage)) {
        filter.Image = input;
        using (var rep = new NSCIImageRep(filter.OutputImage)) {
            var result = new NSImage(rep.Size);
            result.AddRepresentation(rep);
            return result;
        }
    }
}
// Produces a tinted copy of `image`: generates a constant colour, punches it
// up with CIColorControls, multiplies it over a monochrome version of the
// image, then renders the composite into a fresh NSImage via a bitmap rep.
public static NSImage Tint(this NSImage image, NSColor tint)
{
    // Constant-colour generator used as the tint source.
    CIFilter colorGenerator = CIFilter.FromName("CIConstantColorGenerator");
    CIColor color = CIColor.FromCGColor(tint.ToCG());
    colorGenerator.SetValueForKey(color, CIFilterInputKey.Color);

    // Boost the tint colour before compositing.
    CIFilter colorFilter = CIFilter.FromName("CIColorControls");
    colorFilter.SetValueForKey(colorGenerator.ValueForKey(CIFilterOutputKey.Image), CIFilterInputKey.Image);
    colorFilter.SetValueForKey(NSNumber.FromFloat(3f), CIFilterInputKey.Saturation);
    colorFilter.SetValueForKey(NSNumber.FromFloat(0.35f), CIFilterInputKey.Brightness);
    colorFilter.SetValueForKey(NSNumber.FromFloat(1f), CIFilterInputKey.Contrast);

    // Greyscale version of the source image (light grey keeps detail visible).
    CIFilter monochromeFilter = CIFilter.FromName("CIColorMonochrome");
    CIImage baseImage = CIImage.FromCGImage(image.CGImage);
    monochromeFilter.SetValueForKey(baseImage, CIFilterInputKey.Image);
    monochromeFilter.SetValueForKey(CIColor.FromRgb(0.75f, 0.75f, 0.75f), CIFilterInputKey.Color);
    monochromeFilter.SetValueForKey(NSNumber.FromFloat(1f), CIFilterInputKey.Intensity);

    // Multiply the tint over the monochrome image.
    CIFilter compositingFilter = CIFilter.FromName("CIMultiplyCompositing");
    compositingFilter.SetValueForKey(colorFilter.ValueForKey(CIFilterOutputKey.Image), CIFilterInputKey.Image);
    compositingFilter.SetValueForKey(monochromeFilter.ValueForKey(CIFilterOutputKey.Image), CIFilterInputKey.BackgroundImage);

    CIImage outputImage = (CIImage)compositingFilter.ValueForKey(CIFilterOutputKey.Image);
    var extent = outputImage.Extent;

    // Render the composite into a fresh bitmap rep sized to the output extent.
    var newsize = sd.Size.Truncate(extent.Size);
    var tintedImage = new NSImage(newsize);
    var newrep = new NSBitmapImageRep(IntPtr.Zero, newsize.Width, newsize.Height, 8, 4, true, false, NSColorSpace.DeviceRGB, 4 * newsize.Width, 32);
    tintedImage.AddRepresentation(newrep);
    var graphics = NSGraphicsContext.FromBitmap(newrep);
    // Swap in the bitmap-backed context, draw, then restore the previous
    // global context — the save/restore pair must bracket the draw exactly.
    NSGraphicsContext.GlobalSaveGraphicsState();
    NSGraphicsContext.CurrentContext = graphics;
    var ciContext = CIContext.FromContext(graphics.GraphicsPort, new CIContextOptions { UseSoftwareRenderer = true });
    ciContext.DrawImage(outputImage, extent, extent);
    NSGraphicsContext.GlobalRestoreGraphicsState();
    newrep.Size = image.Size;
    return(tintedImage);
}
// Renders `filter` applied to `source` through a GPU-backed CIContext and
// returns the result as a new UIImage.
public static UIImage ToFilter(UIImage source, CIFilter filter)
{
    using (var renderContext = CIContext.FromOptions(new CIContextOptions { UseSoftwareRenderer = false })) {
        using (var ciSource = CIImage.FromCGImage(source.CGImage)) {
            filter.Image = ciSource;
            using (var output = renderContext.CreateCGImage(filter.OutputImage, ciSource.Extent)) {
                return new UIImage(output);
            }
        }
    }
}
// Renders `drawAction` into an image of the control's size, recolours the
// result to the single colour `color` (desaturate, then remap channels via a
// colour matrix), and draws it into the current graphics context.
public static void Colourize(NSView control, Color color, Action drawAction)
{
    var size = control.Frame.Size;
    var image = new NSImage(size);
    // Capture the caller's drawing into the image, honouring flippedness.
    image.LockFocusFlipped(control.IsFlipped);
    drawAction();
    image.UnlockFocus();
    var ciImage = CIImage.FromData(image.AsTiff());
    if (control.IsFlipped) {
        // Flip the CIImage vertically so it matches the view's coordinate system.
        var realSize = control.ConvertSizeToBase(size);
        var affineTransform = new NSAffineTransform();
        affineTransform.Translate(0, realSize.Height);
        affineTransform.Scale(1, -1);
        var filter1 = CIFilter.FromName("CIAffineTransform");
        filter1.SetValueForKey(ciImage, CIInputImage);
        filter1.SetValueForKey(affineTransform, CIInputTransform);
        ciImage = filter1.ValueForKey(CIOutputImage) as CIImage;
    }
    // Remove all colour so the matrix below fully controls the tint.
    var filter2 = CIFilter.FromName("CIColorControls");
    filter2.SetDefaults();
    filter2.SetValueForKey(ciImage, CIInputImage);
    filter2.SetValueForKey(new NSNumber(0.0f), CIInputSaturation);
    ciImage = filter2.ValueForKey(CIOutputImage) as CIImage;
    // Remap the grey levels to the requested colour, channel by channel.
    var filter3 = CIFilter.FromName("CIColorMatrix");
    filter3.SetDefaults();
    filter3.SetValueForKey(ciImage, CIInputImage);
    filter3.SetValueForKey(new CIVector(0, color.R, 0), CIInputRVector);
    filter3.SetValueForKey(new CIVector(color.G, 0, 0), CIInputGVector);
    filter3.SetValueForKey(new CIVector(0, 0, color.B), CIInputBVector);
    ciImage = filter3.ValueForKey(CIOutputImage) as CIImage;
    // Wrap the filtered CIImage in an NSImage and draw it at the origin.
    image = new NSImage(size);
    var rep = NSCIImageRep.FromCIImage(ciImage);
    image.AddRepresentation(rep);
    image.Draw(SD.PointF.Empty, new SD.RectangleF(SD.PointF.Empty, size), NSCompositingOperation.SourceOver, 1);
    /* Use this when implemented in maccore:
     * ciImage.Draw (SD.PointF.Empty, new SD.RectangleF (SD.PointF.Empty, size), NSCompositingOperation.SourceOver, 1);
     */
}
// Generates a QR code UIImage encoding `message` (UTF-8) at the filter's
// default error-correction level.
UIImage machineReadableCodeFromMessage(string message)
{
    var mrcFilter = CIFilter.FromName("CIQRCodeGenerator");
    NSData messageData = NSData.FromString(new NSString(message), NSStringEncoding.UTF8);
    mrcFilter.SetValueForKey(messageData, (NSString)"inputMessage");
    var barcodeCIImage = (CIImage)mrcFilter.ValueForKey((NSString)"outputImage");
    CGRect extent = barcodeCIImage.Extent;
    // NOTE(review): `CIContext` here reads like a static call, but the
    // framework's CreateCGImage is an instance method — presumably this is a
    // field/property of the enclosing type shadowing the type name; confirm
    // against the class definition.
    CGImage barcodeCGImage = CIContext.CreateCGImage(barcodeCIImage, extent);
    UIImage image = new UIImage(barcodeCGImage);
    return(image);
}
// Capture-view delegate hook: runs each preview frame through the filter named
// in `description`, or passes the frame through unchanged when no filter is set.
CIImage WillDisplayImage(QTCaptureView view, CIImage image)
{
    if (description == null)
        return image;

    var filterName = (NSString)description [filterNameKey];
    var displayFilter = CIFilter.FromName(filterName);
    displayFilter.SetDefaults();
    displayFilter.Image = image;
    return displayFilter.OutputImage;
}
/// <summary>
/// Tint the icon with the specified colour
/// </summary>
/// <param name="colour">Colour to tint icon with</param>
public void TintIcon(CIColor colour)
{
    // Load the highlight image from the app bundle as a CIImage.
    var resourcePath = NSBundle.MainBundle.PathForResource(
        Path.GetFileNameWithoutExtension(HighlightImagePath),
        Path.GetExtension(HighlightImagePath));
    var statusImage = CIImage.FromUrl(NSUrl.FromFilename(resourcePath));
    // Multiply a solid-colour image over the icon to tint it.
    var tintImage = CIImage.ImageWithColor(colour);
    var filter = CIFilter.FromName("CIMultiplyCompositing");
    filter.SetValueForKey(tintImage, (NSString)"inputImage");
    filter.SetValueForKey(statusImage, (NSString)"inputBackgroundImage");
    var processedImage = (CIImage)filter.ValueForKey((NSString)"outputImage");
    _statusItemView.Image = processedImage.ToNSImage();
}
// Distorts the view's background through a torus lens centred on the view,
// then kicks off the accompanying animation.
void ApplyFilter()
{
    CIVector midpoint = CIVector.Create(Bounds.GetMidX(), Bounds.GetMidY());
    CIFilter torus = CIFilter.FromName("CITorusLensDistortion");
    // Configure all four parameters in one KVC batch.
    var parameterKeys = new NSString[] {
        CIFilterInputKey.Center,
        CIFilterInputKey.Radius,
        CIFilterInputKey.Width,
        CIFilterInputKey.Refraction
    };
    var parameterValues = new NSObject[] {
        midpoint,
        NSNumber.FromFloat(150),
        NSNumber.FromFloat(2),
        NSNumber.FromFloat(1.7f)
    };
    torus.SetValuesForKeysWithDictionary(NSDictionary.FromObjectsAndKeys(parameterValues, parameterKeys));
    controls.BackgroundFilters = new CIFilter[] { torus };
    AddAnimationToTorusFilter();
}
// Applies `filter` to the full-size photo from `input`, writes the result as
// a JPEG to the editing output's RenderedContentUrl, and invokes `completion`
// on success. Throws InvalidProgramException when the JPEG cannot be written.
void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
    // Load the full size image.
    var inputImage = new CIImage(input.FullSizeImageUrl);

    // Apply the filter.
    filter.Image = inputImage.CreateWithOrientation(input.FullSizeImageOrientation);
    var outputImage = filter.OutputImage;

    // Write the edited image as a JPEG.
    // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
    NSError error;
    if (!ciContext.WriteJpegRepresentation(outputImage, output.RenderedContentUrl, inputImage.ColorSpace(), new NSDictionary(), out error)) {
        throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
    }
    completion();
}
// Fills a CIColorCube filter with a randomly generated colour cube; any other
// filter is left untouched.
static void HandleColorCubeFilter(CIFilter filter)
{
    if (filter.Name != "CIColorCube")
        return;

    // Cube edge length: must be a power of 2, max 128 (max 64 on iOS).
    int dimension = 64;
    // 4 bytes (RGBA) per cube entry, dimension^3 entries.
    int cubeDataSize = 4 * dimension * dimension * dimension;
    filter[new NSString("inputCubeDimension")] = new NSNumber(dimension);
    // Random colours, with every alpha byte forced to fully opaque.
    var cubeData = new byte[cubeDataSize];
    var rng = new Random();
    rng.NextBytes(cubeData);
    for (int alphaIndex = 3; alphaIndex < cubeDataSize; alphaIndex += 4)
        cubeData[alphaIndex] = 255;
    filter[new NSString("inputCubeData")] = NSData.FromArray(cubeData);
}
// Applies `filter` to every frame of the Live Photo in `input` and saves the
// result to `output`, invoking `completion` when the save succeeds.
void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
{
    // This app filters assets only for output. In an app that previews
    // filters while editing, create a livePhotoContext early and reuse it
    // to render both for previewing and for final output.
    var livePhotoContext = new PHLivePhotoEditingContext(input);
    // Run every frame (still and video) through the same filter.
    livePhotoContext.FrameProcessor = (frame, _) => {
        filter.Image = frame.Image;
        return(filter.OutputImage);
    };
    livePhotoContext.SaveLivePhoto(output, null, (success, error) => {
        if (success) {
            completion();
        } else {
            // NOTE(review): `completion` is never invoked on failure, so a
            // caller waiting on it will stall — confirm whether that is intended.
            Console.WriteLine("can't output live photo");
        }
    });
}
// Draws the sub-rectangle (srcX, srcY, srcWidth, srcHeight) of `image` into
// `destRect`, optionally routing the pixels through Core Image when
// `imageAttrs` carries a colour matrix and/or gamma adjustment.
// Throws ArgumentNullException when `image` is null.
public void DrawImage(Image image, Rectangle destRect, float srcX, float srcY, float srcWidth, float srcHeight, GraphicsUnit srcUnit, ImageAttributes imageAttrs)
{
    if (image == null) {
        throw new ArgumentNullException("image");
    }
    var srcRect1 = new RectangleF(srcX, srcY, srcWidth, srcHeight);
    // If the source units are not the same we need to convert them
    // The reason we check for Pixel here is that our graphics already has the Pixel's baked into the model view transform
    if (srcUnit != graphicsUnit && srcUnit != GraphicsUnit.Pixel) {
        ConversionHelpers.GraphicsUnitConversion(srcUnit, graphicsUnit, image.HorizontalResolution, image.VerticalResolution, ref srcRect1);
    }
    // Without a native backing image, fall back to the whole-image overload.
    if (image.NativeCGImage == null) {
        DrawImage(image, destRect);
        return;
    }
    // Obtain the subImage
    var subImage = image.NativeCGImage.WithImageInRect(srcRect1.ToCGRect());
    // If we do not have anything to draw then we exit here
    if (subImage.Width == 0 || subImage.Height == 0) {
        return;
    }
    //			var transform = image.imageTransform;
    ////			// Reset our height on the transform to account for subImage
    //			transform.y0 = subImage.Height;
    ////
    ////			// Make sure we scale the image in case the source rectangle
    ////			// overruns our subimage bouncs width and/or height
    //			float scaleX = subImage.Width/srcRect1.Width;
    //			float scaleY = subImage.Height/srcRect1.Height;
    //			transform.Scale (scaleX, scaleY);
    bool attributesSet = imageAttrs != null && (imageAttrs.isColorMatrixSet || imageAttrs.isGammaSet);
    if (attributesSet) {
        InitializeImagingContext();
        CIImage result = subImage;
        // Apply the 5x4 colour matrix (rows 0-3 are RGBA vectors, row 4 is the bias).
        if (imageAttrs.isColorMatrixSet) {
            var ciFilter = CIFilter.FromName("CIColorMatrix");
            ciFilter.SetDefaults();
            ciFilter.SetValueForKey(result, new NSString("inputImage"));
            var inputRVector = new CIVector(imageAttrs.colorMatrix.Matrix00, imageAttrs.colorMatrix.Matrix01, imageAttrs.colorMatrix.Matrix02, imageAttrs.colorMatrix.Matrix03);
            var inputGVector = new CIVector(imageAttrs.colorMatrix.Matrix10, imageAttrs.colorMatrix.Matrix11, imageAttrs.colorMatrix.Matrix12, imageAttrs.colorMatrix.Matrix13);
            var inputBVector = new CIVector(imageAttrs.colorMatrix.Matrix20, imageAttrs.colorMatrix.Matrix21, imageAttrs.colorMatrix.Matrix22, imageAttrs.colorMatrix.Matrix23);
            var inputAVector = new CIVector(imageAttrs.colorMatrix.Matrix30, imageAttrs.colorMatrix.Matrix31, imageAttrs.colorMatrix.Matrix32, imageAttrs.colorMatrix.Matrix33);
            var inputBiasVector = new CIVector(imageAttrs.colorMatrix.Matrix40, imageAttrs.colorMatrix.Matrix41, imageAttrs.colorMatrix.Matrix42, imageAttrs.colorMatrix.Matrix43);
            ciFilter.SetValueForKey(inputRVector, new NSString("inputRVector"));
            ciFilter.SetValueForKey(inputGVector, new NSString("inputGVector"));
            ciFilter.SetValueForKey(inputBVector, new NSString("inputBVector"));
            ciFilter.SetValueForKey(inputAVector, new NSString("inputAVector"));
            ciFilter.SetValueForKey(inputBiasVector, new NSString("inputBiasVector"));
            result = (CIImage)ciFilter.ValueForKey(new NSString("outputImage"));
        }
        // Apply gamma correction on top of the (possibly colour-adjusted) image.
        if (imageAttrs.isGammaSet) {
            var ciFilter = CIFilter.FromName("CIGammaAdjust");
            ciFilter.SetDefaults();
            ciFilter.SetValueForKey(result, new NSString("inputImage"));
            var inputPower = NSNumber.FromFloat(imageAttrs.gamma);
            ciFilter.SetValueForKey(inputPower, new NSString("inputPower"));
            result = (CIImage)ciFilter.ValueForKey(new NSString("outputImage"));
        }
        // Render the filtered result back into a CGImage for drawing.
        subImage = ciContext.CreateCGImage(result, result.Extent);
    }
    // Flip/scale the image transform so the sub-image fills the source rect.
    transform = image.imageTransform;
    transform.y0 = subImage.Height;
    float scaleX1 = subImage.Width / srcRect1.Width;
    float scaleY1 = subImage.Height / srcRect1.Height;
    transform.Scale(scaleX1, scaleY1);
    // Now draw our image
    DrawImage(destRect, subImage, transform);
}
// Loads "Images/<name>.jpg" and assigns it to the filter's input key of the
// same name — but only when the filter actually exposes that input key.
static void SetImage(CIFilter filter, string name)
{
    if (!filter.InputKeys.Contains(name))
        return;

    var uiImage = new UIImage("Images/" + name + ".jpg");
    filter[new NSString(name)] = CIImage.FromCGImage(uiImage.CGImage);
}
// Advances the "Core Image" slide one step at a time: intro text (0), camera
// move + grid reveal (1), single highlight (2), animated highlight walk (3),
// and finally re-parenting plus animated blur/desaturate filters (4).
public override void PresentStep(int switchIndex, PresentationViewController presentationViewController)
{
    switch (switchIndex) {
    case 0:
        // Set the slide's title and subtitle and add some text
        TextManager.SetTitle("Core Image");
        TextManager.SetSubtitle("CI Filters");

        TextManager.AddBulletAtLevel("Screen-space effects", 0);
        TextManager.AddBulletAtLevel("Applies to a node hierarchy", 0);
        TextManager.AddBulletAtLevel("Filter parameters are animatable", 0);
        TextManager.AddCode("#aNode.#Filters# = new CIFilter[] { filter1, filter2 };#");
        break;
    case 1:
        SCNTransaction.Begin();
        SCNTransaction.AnimationDuration = 1.0f;
        // Dim the text and move back a little
        TextManager.TextNode.Opacity = 0.0f;
        presentationViewController.CameraHandle.Position = presentationViewController.CameraNode.ConvertPositionToNode(new SCNVector3(0, 0, 5.0f), presentationViewController.CameraHandle.ParentNode);
        SCNTransaction.Commit();
        // Reveal the grid
        GroupNode.Opacity = 1;
        break;
    case 2:
        SCNTransaction.Begin();
        SCNTransaction.AnimationDuration = 1;
        // Highlight an item
        HighlightContact(13, presentationViewController);
        SCNTransaction.Commit();
        break;
    case 3:
        var index = 13;
        var subStep = 0;
        // Successively select items
        // NOTE: `index` and `subStep` are captured by reference, so the five
        // staggered closures share and mutate the same counters in order.
        for (var i = 0; i < 5; ++i) {
            var popTime = new DispatchTime(DispatchTime.Now, (long)(i * 0.2 * Utils.NSEC_PER_SEC));
            DispatchQueue.MainQueue.DispatchAfter(popTime, () => {
                SCNTransaction.Begin();
                SCNTransaction.AnimationDuration = 0.2f;
                UnhighlightContact(index);
                // Every fourth sub-step jumps down one row instead of one column.
                if (subStep++ == 3) {
                    index += ColumnCount;
                } else {
                    index++;
                }
                HighlightContact(index, presentationViewController);
                SCNTransaction.Commit();
            });
        }
        break;
    case 4:
        // BLUR+DESATURATE in the background, GLOW in the foreground
        // Here we will change the node hierarchy in order to group all the nodes in the background under a single node.
        // This way we can use a single Core Image filter and apply it on the whole grid, and have another CI filter for the node in the foreground.
        var selectionParent = HeroNode.ParentNode;

        SCNTransaction.Begin();
        SCNTransaction.AnimationDuration = 0;
        // Stop the animations of the selected node
        HeroNode.Transform = HeroNode.PresentationNode.Transform; // set the current rotation to the current presentation value
        HeroNode.RemoveAllAnimations();
        // Re-parent the node by preserving its world tranform
        var wantedWorldTransform = selectionParent.WorldTransform;
        GroupNode.ParentNode.AddChildNode(selectionParent);
        selectionParent.Transform = selectionParent.ParentNode.ConvertTransformFromNode(wantedWorldTransform, null);
        SCNTransaction.Commit();

        // Add CIFilters
        SCNTransaction.Begin();
        SCNTransaction.AnimationDuration = 1;
        // A negative 'centerX' value means no scaling.
        //TODO HeroNode.Filters [0].SetValueForKey (new NSNumber (-1), new NSString ("centerX"));
        // Move the selection to the foreground
        selectionParent.Rotation = new SCNVector4(0, 1, 0, 0);
        HeroNode.Transform = ContentNode.ConvertTransformToNode(SCNMatrix4.CreateTranslation(0, Altitude, 29), selectionParent);
        HeroNode.Scale = new SCNVector3(1, 1, 1);
        HeroNode.Rotation = new SCNVector4(1, 0, 0, -(float)(Math.PI / 4) * 0.25f);
        // Upon completion, rotate the selection forever
        SCNTransaction.SetCompletionBlock(() => {
            var animation = CABasicAnimation.FromKeyPath("rotation");
            animation.Duration = 4.0f;
            animation.From = NSValue.FromVector(new SCNVector4(0, 1, 0, 0));
            animation.To = NSValue.FromVector(new SCNVector4(0, 1, 0, NMath.PI * 2));
            animation.TimingFunction = CAMediaTimingFunction.FromName(CAMediaTimingFunction.EaseInEaseOut);
            animation.RepeatCount = float.MaxValue;
            HeroNode.ChildNodes [0].AddAnimation(animation, new NSString("heroNodeAnimation"));
        });
        // Add the filters
        // Filters are named so they can be animated below via key paths
        // ("filters.blur.inputRadius" / "filters.desaturate.inputSaturation").
        var blurFilter = CIFilter.FromName("CIGaussianBlur");
        blurFilter.SetDefaults();
        blurFilter.Name = "blur";
        blurFilter.SetValueForKey(new NSNumber(0), CIFilterInputKey.Radius);
        var desaturateFilter = CIFilter.FromName("CIColorControls");
        desaturateFilter.SetDefaults();
        desaturateFilter.Name = "desaturate";
        GroupNode.Filters = new CIFilter[] { blurFilter, desaturateFilter };
        SCNTransaction.Commit();

        // Increate the blur radius and desaturate progressively
        SCNTransaction.Begin();
        SCNTransaction.AnimationDuration = 2;
        GroupNode.SetValueForKey(new NSNumber(10), new NSString("filters.blur.inputRadius"));
        GroupNode.SetValueForKey(new NSNumber(0.1), new NSString("filters.desaturate.inputSaturation"));
        SCNTransaction.Commit();
        break;
    }
}
// Introspection test: verifies that every managed CIFilter subclass round-trips
// correctly with the native filter's key lists. For each concrete CIFilter type in
// the assembly it checks three directions:
//   1. every [Export]-ed property maps to a native input key (or output key for
//      `output*` selectors);
//   2. every native input key maps to a settable managed property;
//   3. every native output key maps to a getter-only managed property.
// Discrepancies are accumulated via ReportError and asserted at the end.
public void Keys() {
	Errors = 0;
	ContinueOnFailure = true;
	var nspace = CIFilterType.Namespace;
	var types = CIFilterType.Assembly.GetTypes();
	foreach (Type t in types) {
		if (t.Namespace != nspace) {
			continue;
		}
		if (t.IsAbstract || !CIFilterType.IsAssignableFrom(t)) {
			continue;
		}
		// we need to skip the filters that are not supported by the executing version of iOS
		if (Skip(t)) {
			continue;
		}
		// only types with a public default constructor can be instantiated for inspection
		var ctor = t.GetConstructor(Type.EmptyTypes);
		if ((ctor == null) || ctor.IsAbstract) {
			continue;
		}
		CIFilter f = ctor.Invoke(null) as CIFilter;
		// first check that every property can be mapped to an input key - except if it starts with "Output"
		foreach (var p in t.GetProperties(BindingFlags.Public | BindingFlags.Instance)) {
			var pt = p.DeclaringType;
			// skip properties inherited from outside the CIFilter hierarchy, and CIFilter's own
			if (!CIFilterType.IsAssignableFrom(pt) || (pt == CIFilterType)) {
				continue;
			}
			if (SkipDueToAttribute(p)) {
				continue;
			}
			var getter = p.GetGetMethod();
			var ea = getter.GetCustomAttribute <ExportAttribute> (false);
			// only properties coming (inlined) from protocols have an [Export] attribute
			if (ea == null) {
				continue;
			}
			var key = ea.Selector;
			// 'output' is always explicit
			if (key.StartsWith("output", StringComparison.Ordinal)) {
				if (Array.IndexOf(f.OutputKeys, key) < 0) {
					ReportError($"{t.Name}: Property `{p.Name}` mapped to key `{key}` is not part of `OutputKeys`.");
					//GenerateBinding (f, Console.Out);
				}
			} else {
				// special cases (protocol names are better)
				switch (t.Name) {
				case "CIBicubicScaleTransform":
					switch (key) {
					case "parameterB":
						key = "inputB";
						break;
					case "parameterC":
						key = "inputC";
						break;
					}
					break;
				case "CICmykHalftone":
					switch (key) {
					case "grayComponentReplacement":
						key = "inputGCR";
						break;
					case "underColorRemoval":
						key = "inputUCR";
						break;
					}
					break;
				case "CIGlassDistortion":
					switch (key) {
					case "textureImage":
						key = "texture";
						break;
					}
					break;
				}
				// 'input' is implied (generally) and explicit (in a few cases)
				if (!key.StartsWith("input", StringComparison.Ordinal)) {
					key = "input" + Char.ToUpperInvariant(key [0]) + key.Substring(1);
				}
				if (Array.IndexOf(f.InputKeys, key) < 0) {
					ReportError($"{t.Name}: Property `{p.Name}` mapped to key `{key}` is not part of `InputKeys`.");
					//GenerateBinding (f, Console.Out);
				}
			}
		}
		// second check that every input key is mapped to an property
		foreach (var key in f.InputKeys) {
			string cap = Char.ToUpperInvariant(key [0]) + key.Substring(1);
			// special cases (protocol names are better)
			switch (t.Name) {
			case "CICmykHalftone":
				switch (key) {
				case "inputGCR":
					cap = "GrayComponentReplacement";
					break;
				case "inputUCR":
					cap = "UnderColorRemoval";
					break;
				}
				break;
			case "CIAccordionFoldTransition":
				switch (key) {
				case "inputNumberOfFolds":
					cap = "FoldCount";
					break;
				}
				break;
			case "CIBicubicScaleTransform":
				switch (key) {
				case "inputB":
					cap = "ParameterB";
					break;
				case "inputC":
					cap = "ParameterC";
					break;
				}
				break;
			}
			// IgnoreCase because there are acronyms (more than 2 letters) that naming convention force us to change
			var pi = t.GetProperty(cap, BindingFlags.IgnoreCase | BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy);
			if (pi == null) {
				// 2nd chance: some, but not all, property are prefixed by `Input`
				if (key.StartsWith("input", StringComparison.Ordinal)) {
					cap = Char.ToUpperInvariant(key [5]) + key.Substring(6);
					pi = t.GetProperty(cap, BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy);
				}
			}
			if (pi == null) {
				ReportError($"{t.Name}: Input Key `{key}` is NOT mapped to a `{cap}` property.");
				//GenerateBinding (f, Console.Out);
			} else if (pi.GetSetMethod() == null) {
				// input keys are written through the property, so a setter is mandatory
				ReportError($"{t.Name}: Property `{pi.Name}` MUST have a setter.");
			}
		}
		// third check that every output key is mapped to an property
		foreach (var key in f.OutputKeys) {
			// special cases
			switch (t.Name) {
			case "CIKeystoneCorrectionCombined":
			case "CIKeystoneCorrectionHorizontal":
			case "CIKeystoneCorrectionVertical":
				switch (key) {
				case "outputRotationFilter":
					continue; // lack of documentation about the returned type
				}
				break;
			case "CILanczosScaleTransform":
				switch (key) {
				// ref: https://github.com/xamarin/xamarin-macios/issues/7209
				case "outputImageNewScaleX:scaleY:":
				case "outputImageOldScaleX:scaleY:":
					continue;
				}
				break;
			case "CIDiscBlur":
				switch (key) {
				// existed in iOS 10.3 but not in iOS 13 - we're not adding them
				case "outputImageOriginal":
				case "outputImageEnhanced":
					continue;
				}
				break;
			case "CIGaussianBlur":
				switch (key) {
				case "outputImageV1": // existed briefly in macOS 10.11, but neither before nor after.
					continue;
				}
				break;
			case "CIAreaAverage":
			case "CIAreaHistogram":
			case "CIAreaMinMax":
				switch (key) {
				case "outputImageMPS":
				case "outputImageMPS:":
				case "outputImageNonMPS:": // no doc for argument
					continue;
				}
				break;
			}
			var cap = Char.ToUpperInvariant(key [0]) + key.Substring(1);
			// IgnoreCase because there are acronyms (more than 2 letters) that naming convention force us to change
			var po = t.GetProperty(cap, BindingFlags.IgnoreCase | BindingFlags.Public | BindingFlags.Instance | BindingFlags.FlattenHierarchy);
			if (po == null) {
				ReportError($"{t.Name}: Output Key `{key}` is NOT mapped to a `{cap}` property.");
				//GenerateBinding (f, Console.Out);
			} else if (po.GetSetMethod() != null) {
				// output keys are read-only on the native side; a setter would be misleading
				ReportError($"{t.Name}: Property `{po.Name}` should NOT have a setter.");
			}
		}
	}
	Assert.AreEqual(0, Errors, "{0} potential errors found{1}", Errors, Errors == 0 ? string.Empty : ":\n" + ErrorData.ToString() + "\n");
}
static void PrintFilterInfo(CIFilter filter) { Console.WriteLine(filter.Name); foreach (var key in filter.InputKeys) { var attributes = (NSDictionary)filter.Attributes[new NSString(key)]; var attributeClass = attributes[new NSString("CIAttributeClass")]; Console.WriteLine(" {0} : {1}", key, attributeClass); } }
// Applies `filter` to this editor's Asset and commits the result to the Photos
// library. The flow is: request editing input -> (on a background queue) build
// adjustment data + editing output -> run the media-type-specific filter
// function -> commit the edit via PerformChanges. All failures after the input
// request are logged, not thrown.
void ApplyFilter (CIFilter filter)
{
	// Set up a handler to make sure we can handle prior edits.
	var options = new PHContentEditingInputRequestOptions ();
	options.CanHandleAdjustmentData = (adjustmentData => {
		return adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion;
	});

	// Prepare for editing.
	Asset.RequestContentEditingInput (options, (input, requestStatusInfo) => {
		if (input == null)
			throw new InvalidProgramException ($"can't get content editing input: {requestStatusInfo}");

		// This handler gets called on the main thread; dispatch to a background queue for processing.
		DispatchQueue.GetGlobalQueue (DispatchQueuePriority.Default).DispatchAsync (() => {
			// Create a PHAdjustmentData object that describes the filter that was applied.
			var adjustmentData = new PHAdjustmentData (
				formatIdentifier,
				formatVersion,
				NSData.FromString (filter.Name, NSStringEncoding.UTF8));

			// NOTE:
			// This app's filter UI is fire-and-forget. That is, the user picks a filter,
			// and the app applies it and outputs the saved asset immediately. There's
			// no UI state for having chosen but not yet committed an edit. This means
			// there's no role for reading adjustment data -- you do that to resume
			// in-progress edits, and this sample app has no notion of "in-progress".
			//
			// However, it's still good to write adjustment data so that potential future
			// versions of the app (or other apps that understand our adjustement data
			// format) could make use of it.

			// Create content editing output, write the adjustment data.
			var output = new PHContentEditingOutput (input) {
				AdjustmentData = adjustmentData
			};

			// Select a filtering function for the asset's media type.
			Action<CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
			if (Asset.MediaSubtypes.HasFlag (PHAssetMediaSubtype.PhotoLive))
				applyFunc = ApplyLivePhotoFilter;
			else if (Asset.MediaType == PHAssetMediaType.Image)
				applyFunc = ApplyPhotoFilter;
			else
				applyFunc = ApplyVideoFilter;

			// Apply the filter.
			applyFunc (filter, input, output, () => {
				// When rendering is done, commit the edit to the Photos library.
				PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
					var request = PHAssetChangeRequest.ChangeRequest (Asset);
					request.ContentEditingOutput = output;
				}, (success, error) => {
					if (!success)
						Console.WriteLine ($"can't edit asset: {error.LocalizedDescription}");
				});
			});
		});
	});
}
private void UpdateFilter () { ciFilter = CIFilter.FromName (selectedFilterName); var inputImage = CIImage.FromCGImage (this.inputImage.CGImage); CIImageOrientation orientation = Convert (this.inputImage.Orientation); inputImage = inputImage.CreateWithOrientation (orientation); ciFilter.Image = inputImage; }
void ApplyLivePhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion) { // This app filters assets only for output. In an app that previews // filters while editing, create a livePhotoContext early and reuse it // to render both for previewing and for final output. var livePhotoContext = new PHLivePhotoEditingContext (input); livePhotoContext.FrameProcessor = (frame, _) => { filter.Image = frame.Image; return filter.OutputImage; }; livePhotoContext.SaveLivePhoto (output, null, (success, error) => { if (success) completion (); else Console.WriteLine ("can't output live photo"); }); }
void ApplyPhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion) { // Load the full size image. var inputImage = new CIImage (input.FullSizeImageUrl); // Apply the filter. filter.Image = inputImage.CreateWithOrientation (input.FullSizeImageOrientation); var outputImage = filter.OutputImage; // Write the edited image as a JPEG. // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503 NSError error; if (!ciContext.WriteJpegRepresentation (outputImage, output.RenderedContentUrl, inputImage.ColorSpace (), new NSDictionary(), out error)) throw new InvalidProgramException ($"can't apply filter to image: {error.LocalizedDescription}"); completion (); }
void UpdateFilter() { TryDisposeFilterInput (); TryDisposeFilter (); ciFilter = CIFilter.FromName (selectedFilterName); CIImageOrientation orientation = Convert (inputImage.Orientation); using (CGImage cgImage = inputImage.CGImage) { using (CIImage ciInputImage = CIImage.FromCGImage (cgImage)) ciFilter.Image = ciInputImage.CreateWithOrientation (orientation); } }
/* [Export("initWithCoder:")]
 * public QTRDocument (NSCoder coder) : base(coder)
 * {
 * }*/

// Builds the whole QTKit capture pipeline once the document's nib is loaded:
// session + live preview (with optional CIFilter applied per frame), movie and
// audio outputs, default device selection, and notification observers.
// NOTE(review): the order here matters — outputs are added before StartRunning
// and observers are registered after; confirm before reordering anything.
public override void WindowControllerDidLoadNib(NSWindowController windowController)
{
	NSError error;

	base.WindowControllerDidLoadNib(windowController);

	// Create session
	session = new QTCaptureSession();

	// Attach preview to session
	captureView.CaptureSession = session;
	// Per displayed frame: if a preview filter is selected, run the frame
	// through it; otherwise pass the frame through untouched.
	captureView.WillDisplayImage = (view, image) => {
		if (videoPreviewFilterDescription == null) {
			return(image);
		}
		var selectedFilter = (NSString)videoPreviewFilterDescription [filterNameKey];
		var filter = CIFilter.FromName(selectedFilter);
		filter.SetDefaults();
		filter.SetValueForKey(image, CIFilterInputKey.Image);
		return((CIImage)filter.ValueForKey(CIFilterOutputKey.Image));
	};

	// Attach outputs to session
	movieFileOutput = new QTCaptureMovieFileOutput();
	movieFileOutput.WillStartRecording += delegate {
		Console.WriteLine("Will start recording");
	};
	movieFileOutput.DidStartRecording += delegate {
		Console.WriteLine("Started Recording");
	};
	movieFileOutput.ShouldChangeOutputFile = (output, url, connections, reason) => {
		// Should change the file on error
		Console.WriteLine(reason.LocalizedDescription);
		return(false);
	};
	movieFileOutput.MustChangeOutputFile += delegate(object sender, QTCaptureFileErrorEventArgs e) {
		Console.WriteLine("Must change file due to error");
	};

	// These ones we care about, some notifications
	movieFileOutput.WillFinishRecording += delegate(object sender, QTCaptureFileErrorEventArgs e) {
		Console.WriteLine("Will finish recording");
		// KVO notification must happen on the main thread
		InvokeOnMainThread(delegate {
			WillChangeValue("Recording");
		});
	};
	// When a recording finishes: report an error sheet, or prompt the user to
	// save (move) or discard (remove) the recorded file.
	movieFileOutput.DidFinishRecording += delegate(object sender, QTCaptureFileErrorEventArgs e) {
		Console.WriteLine("Recorded {0} bytes duration {1}", movieFileOutput.RecordedFileSize, movieFileOutput.RecordedDuration);
		DidChangeValue("Recording");
		if (e.Reason != null) {
			NSAlert.WithError(e.Reason).BeginSheet(Window, () => {});
			return;
		}
		var save = NSSavePanel.SavePanel;
		save.AllowedFileTypes = new string[] { "mov" };
		save.CanSelectHiddenExtension = true;
		save.Begin(code => {
			NSError err2;
			if (code == (int)NSPanelButtonType.Ok) {
				NSFileManager.DefaultManager.Move(e.OutputFileURL, save.Url, out err2);
			} else {
				NSFileManager.DefaultManager.Remove(e.OutputFileURL.Path, out err2);
			}
		});
	};
	// NOTE(review): AddOutput errors are ignored here (error is overwritten and
	// never checked) — confirm that's intentional for this sample.
	session.AddOutput(movieFileOutput, out error);

	audioPreviewOutput = new QTCaptureAudioPreviewOutput();
	session.AddOutput(audioPreviewOutput, out error);

	// Default to the first available video/audio devices, if any.
	if (VideoDevices.Length > 0) {
		SelectedVideoDevice = VideoDevices [0];
	}
	if (AudioDevices.Length > 0) {
		SelectedAudioDevice = AudioDevices [0];
	}

	session.StartRunning();

	// events: devices added/removed
	AddObserver(QTCaptureDevice.WasConnectedNotification, DevicesDidChange);
	AddObserver(QTCaptureDevice.WasDisconnectedNotification, DevicesDidChange);

	// events: connection format changes
	AddObserver(QTCaptureConnection.FormatDescriptionDidChangeNotification, FormatDidChange);
	AddObserver(QTCaptureConnection.FormatDescriptionWillChangeNotification, FormatWillChange);

	AddObserver(QTCaptureDevice.AttributeDidChangeNotification, AttributeDidChange);
	AddObserver(QTCaptureDevice.AttributeWillChangeNotification, AttributeWillChange);
}
void TryDisposeFilter() { if (ciFilter == null) return; ciFilter.Dispose (); ciFilter = null; }
void ApplyVideoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion) { // Load AVAsset to process from input. var avAsset = input.AudiovisualAsset; if (avAsset == null) throw new InvalidProgramException ("can't get AV asset to edit"); // Set up a video composition to apply the filter. var composition = AVVideoComposition.CreateVideoComposition (avAsset, request => { filter.Image = request.SourceImage; var filtered = filter.OutputImage; request.Finish (filtered, null); }); // Export the video composition to the output URL. // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443 var export = new AVAssetExportSession (avAsset, AVAssetExportSession.PresetHighestQuality) { OutputFileType = AVFileType.QuickTimeMovie, OutputUrl = output.RenderedContentUrl, VideoComposition = composition }; export.ExportAsynchronously (completion); }
void ApplyFilter (CIFilter filter) { // Prepare the options to pass when requesting to edit the image. var options = new PHContentEditingInputRequestOptions (); options.SetCanHandleAdjustmentDataHandler (adjustmentData => { bool result = false; InvokeOnMainThread (() => { result = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0"; }); return result; }); Asset.RequestContentEditingInput (options,(contentEditingInput, requestStatusInfo) => { // Create a CIImage from the full image representation. var url = contentEditingInput.FullSizeImageUrl; int orientation = (int)contentEditingInput.FullSizeImageOrientation; var inputImage = CIImage.FromUrl (url); inputImage = inputImage.CreateWithOrientation ((CIImageOrientation)orientation); // Create the filter to apply. filter.SetDefaults (); filter.Image = inputImage; // Apply the filter. CIImage outputImage = filter.OutputImage; // Create a PHAdjustmentData object that describes the filter that was applied. var adjustmentData = new PHAdjustmentData ( AdjustmentFormatIdentifier, "1.0", NSData.FromString (filter.Name, NSStringEncoding.UTF8) ); var contentEditingOutput = new PHContentEditingOutput (contentEditingInput); NSData jpegData = outputImage.GetJpegRepresentation (0.9f); jpegData.Save (contentEditingOutput.RenderedContentUrl, true); contentEditingOutput.AdjustmentData = adjustmentData; // Ask the shared PHPhotoLinrary to perform the changes. PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => { var request = PHAssetChangeRequest.ChangeRequest (Asset); request.ContentEditingOutput = contentEditingOutput; }, (success, error) => { if (!success) Console.WriteLine ("Error: {0}", error.LocalizedDescription); }); }); }
// // Utility function used by pure-output generation filters // public CIImage Crop (CIFilter input) { return new CICrop () { Image = input.OutputImage, Rectangle = new CIVector (0, 0, window.Bounds.Width, window.Bounds.Height) }.OutputImage; }
// Applies `filter` to this editor's Asset and commits the result to the Photos
// library: request editing input -> (on a background queue) build adjustment
// data + editing output -> dispatch to the media-type-specific filter function
// -> commit via PerformChanges. Failures after the input request are logged.
void ApplyFilter(CIFilter filter)
{
	// Set up a handler to make sure we can handle prior edits.
	var options = new PHContentEditingInputRequestOptions();
	options.CanHandleAdjustmentData = (adjustmentData => {
		return(adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion);
	});

	// Prepare for editing.
	Asset.RequestContentEditingInput(options, (input, requestStatusInfo) => {
		if (input == null) {
			throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
		}

		// This handler gets called on the main thread; dispatch to a background queue for processing.
		DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() => {
			// Create a PHAdjustmentData object that describes the filter that was applied.
			var adjustmentData = new PHAdjustmentData(
				formatIdentifier,
				formatVersion,
				NSData.FromString(filter.Name, NSStringEncoding.UTF8));

			// NOTE:
			// This app's filter UI is fire-and-forget. That is, the user picks a filter,
			// and the app applies it and outputs the saved asset immediately. There's
			// no UI state for having chosen but not yet committed an edit. This means
			// there's no role for reading adjustment data -- you do that to resume
			// in-progress edits, and this sample app has no notion of "in-progress".
			//
			// However, it's still good to write adjustment data so that potential future
			// versions of the app (or other apps that understand our adjustement data
			// format) could make use of it.

			// Create content editing output, write the adjustment data.
			var output = new PHContentEditingOutput(input) {
				AdjustmentData = adjustmentData
			};

			// Select a filtering function for the asset's media type.
			Action <CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
			if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive)) {
				applyFunc = ApplyLivePhotoFilter;
			} else if (Asset.MediaType == PHAssetMediaType.Image) {
				applyFunc = ApplyPhotoFilter;
			} else {
				applyFunc = ApplyVideoFilter;
			}

			// Apply the filter.
			applyFunc(filter, input, output, () => {
				// When rendering is done, commit the edit to the Photos library.
				PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
					var request = PHAssetChangeRequest.ChangeRequest(Asset);
					request.ContentEditingOutput = output;
				}, (success, error) => {
					if (!success) {
						Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
					}
				});
			});
		});
	});
}