void ApplyNoirFilter(object sender, EventArgs e)
        {
            Asset.RequestContentEditingInput(new PHContentEditingInputRequestOptions(), (input, options) => {
                // perform the editing operation, which applies a noir filter in this case
                var image = CIImage.FromUrl(input.FullSizeImageUrl);
                image     = image.CreateWithOrientation((CIImageOrientation)input.FullSizeImageOrientation);
                var noir  = new CIPhotoEffectNoir {
                    Image = image
                };
                var ciContext = CIContext.FromOptions(null);
                var output    = noir.OutputImage;

                var uiImage     = UIImage.FromImage(ciContext.CreateCGImage(output, output.Extent));
                imageView.Image = uiImage;

                // save the filtered image data to a PHContentEditingOutput instance
                var editingOutput  = new PHContentEditingOutput(input);
                var adjustmentData = new PHAdjustmentData();
                var data           = uiImage.AsJPEG();
                NSError error;
                data.Save(editingOutput.RenderedContentUrl, false, out error);
                editingOutput.AdjustmentData = adjustmentData;

                // make a change request to publish the changes from the editing output
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
                    () => {
                    PHAssetChangeRequest request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = editingOutput;
                },
                    (ok, err) => Console.WriteLine("photo updated successfully: {0}", ok));
            });
        }
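The noir example above stores an empty PHAdjustmentData and ignores the result of Save, so a later edit session cannot recognize or resume the edit. A minimal sketch, assuming a hypothetical format identifier and version, of how the adjustment data could be populated and the write checked before committing the change request:

        // Sketch only: hypothetical identifier/version; a real app would reuse the same
        // pair in CanHandleAdjustmentData to recognize its own edits later.
        const string AdjustmentFormatIdentifier = "com.example.photofilter";
        const string AdjustmentFormatVersion = "1.0";

        var adjustmentData = new PHAdjustmentData(
            AdjustmentFormatIdentifier,
            AdjustmentFormatVersion,
            NSData.FromString("CIPhotoEffectNoir", NSStringEncoding.UTF8));

        NSError saveError;
        if (!data.Save(editingOutput.RenderedContentUrl, false, out saveError)) {
            Console.WriteLine("could not write rendered JPEG: {0}", saveError);
            return;
        }
        editingOutput.AdjustmentData = adjustmentData;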
        public bool CanHandleAdjustmentData(PHAdjustmentData adjustmentData)
        {
            // Inspect the adjustmentData to determine whether your extension can work with past edits.
            // (Typically, you use its formatIdentifier and formatVersion properties to do this.)
            bool result = adjustmentData.FormatIdentifier == BundleId;

            result &= adjustmentData.FormatVersion == "1.0";
            return(result);
        }
        void ApplyFilter(CIFilter filter)
        {
            // Prepare the options to pass when requesting to edit the image.
            var options = new PHContentEditingInputRequestOptions();

            options.SetCanHandleAdjustmentDataHandler(adjustmentData => {
                bool result = false;
                InvokeOnMainThread(() => {
                    result = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0";
                });

                return(result);
            });

            Asset.RequestContentEditingInput(options, (contentEditingInput, requestStatusInfo) => {
                // Create a CIImage from the full image representation.
                var url         = contentEditingInput.FullSizeImageUrl;
                int orientation = (int)contentEditingInput.FullSizeImageOrientation;
                var inputImage  = CIImage.FromUrl(url);
                inputImage      = inputImage.CreateWithOrientation((CIImageOrientation)orientation);

                // Configure the filter to apply.
                filter.SetDefaults();
                filter.Image = inputImage;

                // Apply the filter.
                CIImage outputImage = filter.OutputImage;

                // Create a PHAdjustmentData object that describes the filter that was applied.
                var adjustmentData = new PHAdjustmentData(
                    AdjustmentFormatIdentifier,
                    "1.0",
                    NSData.FromString(filter.Name, NSStringEncoding.UTF8)
                    );

                var contentEditingOutput = new PHContentEditingOutput(contentEditingInput);
                NSData jpegData          = outputImage.GetJpegRepresentation(0.9f);
                jpegData.Save(contentEditingOutput.RenderedContentUrl, true);
                contentEditingOutput.AdjustmentData = adjustmentData;

                // Ask the shared PHPhotoLibrary to perform the changes.
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                    var request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = contentEditingOutput;
                }, (success, error) => {
                    if (!success)
                    {
                        Console.WriteLine("Error: {0}", error.LocalizedDescription);
                    }
                });
            });
        }
        static string FetchAdjustmentFilterName(PHContentEditingInput contentEditingInput)
        {
            string filterName = null;

            try {
                PHAdjustmentData adjustmentData = contentEditingInput.AdjustmentData;
                if (adjustmentData != null)
                {
                    filterName = (NSString)NSKeyedUnarchiver.UnarchiveObject(adjustmentData.Data);
                }
            } catch (Exception exception) {
                Console.WriteLine("Exception decoding adjustment data: {0}", exception);
            }

            return(filterName);
        }
        public void StartContentEditing(PHContentEditingInput input, UIImage placeholderImage)
        {
            // Present content for editing and keep the contentEditingInput for use when closing the edit session.
            // If you returned true from CanHandleAdjustmentData(), contentEditingInput has the original image and adjustment data.
            // If you returned false, the contentEditingInput has past edits "baked in".
            contentEditingInput = input;

            // Load input image
            switch (contentEditingInput.MediaType)
            {
            case PHAssetMediaType.Image:
                inputImage = contentEditingInput.DisplaySizeImage;
                break;

            case PHAssetMediaType.Video:
                inputImage = ImageFor(contentEditingInput.AvAsset, 0);
                break;

            default:
                break;
            }

            // Load adjustment data, if any
            try {
                PHAdjustmentData adjustmentData = contentEditingInput.AdjustmentData;
                if (adjustmentData != null)
                {
                    selectedFilterName = (string)(NSString)NSKeyedUnarchiver.UnarchiveObject(adjustmentData.Data);
                }
            } catch (Exception exception) {
                Console.WriteLine("Exception decoding adjustment data: {0}", exception);
            }

            if (string.IsNullOrWhiteSpace(selectedFilterName))
            {
                selectedFilterName = "CISepiaTone";
            }

            initialFilterName = selectedFilterName;

            // Update filter and background image
            UpdateFilter();
            UpdateFilterPreview();
            BackgroundImageView.Image = placeholderImage;
        }
 public bool CanHandleAdjustmentData(PHAdjustmentData adjustmentData)
 {
     return(false);
 }
        public void FinishContentEditing(Action <PHContentEditingOutput> completionHandler)
        {
            PHContentEditingOutput contentEditingOutput = new PHContentEditingOutput(contentEditingInput);

            // Adjustment data
            NSData           archivedData   = NSKeyedArchiver.ArchivedDataWithRootObject((NSString)selectedFilterName);
            PHAdjustmentData adjustmentData = new PHAdjustmentData("com.your-company.PhotoFilter", "1.0",
                                                                   archivedData);

            contentEditingOutput.AdjustmentData = adjustmentData;

            switch (contentEditingInput.MediaType)
            {
            case PHAssetMediaType.Image:
            {
                // Get full size image
                NSUrl url = contentEditingInput.FullSizeImageUrl;
                CIImageOrientation orientation = contentEditingInput.FullSizeImageOrientation;

                // Generate rendered JPEG data
                UIImage image = UIImage.FromFile(url.Path);
                image = TransformeImage(image, orientation);
                NSData renderedJPEGData = image.AsJPEG(0.9f);

                // Save JPEG data
                NSError error   = null;
                bool    success = renderedJPEGData.Save(contentEditingOutput.RenderedContentUrl, NSDataWritingOptions.Atomic, out error);
                if (success)
                {
                    completionHandler(contentEditingOutput);
                }
                else
                {
                    Console.WriteLine("An error occured: {0}", error);
                    completionHandler(null);
                }
                break;
            }

            case PHAssetMediaType.Video:
            {
                // Get AV asset
                AVReaderWriter avReaderWriter = new AVReaderWriter(contentEditingInput.AvAsset);
                avReaderWriter.Delegate = this;

                // Save filtered video
                avReaderWriter.WriteToUrl(contentEditingOutput.RenderedContentUrl,
                                          progress => {
                    },
                                          error => {
                        if (error == null)
                        {
                            completionHandler(contentEditingOutput);
                            return;
                        }
                        Console.WriteLine("An error occured: {0}", error);
                        completionHandler(null);
                    });
                break;
            }

            default:
                break;
            }
        }
        void UpdateToolbars()
        {
            // Enable editing buttons if the asset can be edited.
            EditButton.Enabled     = Asset.CanPerformEditOperation(PHAssetEditOperation.Content);
            FavoriteButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Properties);
            FavoriteButton.Title   = Asset.Favorite ? "♥︎" : "♡";

            // Enable the trash button if the asset can be deleted.
            if (AssetCollection != null)
            {
                TrashButton.Enabled = AssetCollection.CanPerformEditOperation(PHCollectionEditOperation.RemoveContent);
            }
            else
            {
                TrashButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Delete);
            }

            // Set the appropriate toolbarItems based on the mediaType of the asset.
            if (Asset.MediaType == PHAssetMediaType.Video)
            {
#if __TVOS__
                NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { PlayButton, FavoriteButton, TrashButton };
#elif __IOS__
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, PlayButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
            else
            {
#if __TVOS__
                // In tvOS, PHLivePhotoView doesn't do playback gestures,
                // so add a play button for Live Photos.
                if (Asset.PlaybackStyle == PHAssetPlaybackStyle.LivePhoto)
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { LivePhotoPlayButton, FavoriteButton, TrashButton };
                }
                else
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { FavoriteButton, TrashButton };
                }
#elif __IOS__
                // In iOS, present both stills and Live Photos the same way, because
                // PHLivePhotoView provides the same gesture-based UI as in Photos app.
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
        }

        void UpdateStillImage()
        {
            // Prepare the options to pass when fetching the (photo or video preview) image.
            var options = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // Handler might not be called on the main queue, so re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() =>
                    {
                        ProgressView.Progress = (float)progress;
                    });
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageForAsset(Asset, GetTargetSize(), PHImageContentMode.AspectFit, options, (image, info) =>
            {
                // Hide the progress view now the request has completed.
                ProgressView.Hidden = true;

                // If successful, show the image view and display the image.
                if (image == null)
                {
                    return;
                }

                // Now that we have the image, show it.
                ImageView.Hidden = false;
                ImageView.Image  = image;
            });
        }

        void UpdateLivePhoto()
        {
            // Prepare the options to pass when fetching the live photo.
            var options = new PHLivePhotoRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary dictionary) =>
                {
                    stop = false;
                    // Handler might not be called on the main queue, so re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;
            // Request the live photo for the asset from the default PHImageManager.
            PHImageManager.DefaultManager.RequestLivePhoto(Asset, GetTargetSize(), PHImageContentMode.AspectFit, options, (livePhoto, info) =>
            {
                // Hide the progress view now the request has completed.
                ProgressView.Hidden = true;

                // If successful, show the live photo view and display the live photo.
                if (livePhoto == null)
                {
                    return;
                }

                // Now that we have the Live Photo, show it.
                ImageView.Hidden         = true;
                AnimatedImageView.Hidden = true;
                LivePhotoView.Hidden     = false;
                LivePhotoView.LivePhoto  = livePhoto;

                // Play back a short section of the Live Photo, similar to the Photos share sheet.
                if (!isPlayingHint)
                {
                    isPlayingHint = true;
                    LivePhotoView.StartPlayback(PHLivePhotoViewPlaybackStyle.Hint);
                }
            });
        }

        void UpdateAnimatedImage()
        {
            // Prepare the options to pass when fetching the (photo or video preview) image.
            var options = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                Version              = PHImageRequestOptionsVersion.Original,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // Handler might not be called on the main queue, so re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() =>
                    {
                        ProgressView.Progress = (float)progress;
                    });
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageData(Asset, options, (data, dataUti, orientation, info) =>
            {
                // Hide the progress view now the request has completed.
                ProgressView.Hidden = true;

                // If successful, show the image view and display the image.
                if (data == null)
                {
                    return;
                }

                var animatedImage = new AnimatedImage(data);

                LivePhotoView.Hidden            = true;
                ImageView.Hidden                = true;
                AnimatedImageView.Hidden        = false;
                AnimatedImageView.AnimatedImage = animatedImage;
                AnimatedImageView.IsPlaying     = true;
            });
        }

        #endregion

        #region Asset editing

        void RevertAsset(UIAlertAction action)
        {
            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
            {
                var request = PHAssetChangeRequest.ChangeRequest(Asset);
                request.RevertAssetContentToOriginal();
            }, (success, error) =>
            {
                if (!success)
                {
                    Console.WriteLine($"can't revert asset: {error.LocalizedDescription}");
                }
            });
        }

        void ApplyFilter(CIFilter filter)
        {
            // Set up a handler to make sure we can handle prior edits.
            var options = new PHContentEditingInputRequestOptions();

            options.CanHandleAdjustmentData = (adjustmentData =>
            {
                return(adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion);
            });

            // Prepare for editing.
            Asset.RequestContentEditingInput(options, (input, requestStatusInfo) =>
            {
                if (input == null)
                {
                    throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
                }

                // This handler gets called on the main thread; dispatch to a background queue for processing.
                DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() =>
                {
                    // Create a PHAdjustmentData object that describes the filter that was applied.
                    var adjustmentData = new PHAdjustmentData(
                        formatIdentifier,
                        formatVersion,
                        NSData.FromString(filter.Name, NSStringEncoding.UTF8));

                    // NOTE:
                    // This app's filter UI is fire-and-forget. That is, the user picks a filter,
                    // and the app applies it and outputs the saved asset immediately. There's
                    // no UI state for having chosen but not yet committed an edit. This means
                    // there's no role for reading adjustment data -- you do that to resume
                    // in-progress edits, and this sample app has no notion of "in-progress".
                    //
                    // However, it's still good to write adjustment data so that potential future
                    // versions of the app (or other apps that understand our adjustment data
                    // format) could make use of it.

                    // Create content editing output, write the adjustment data.
                    var output = new PHContentEditingOutput(input)
                    {
                        AdjustmentData = adjustmentData
                    };

                    // Select a filtering function for the asset's media type.
                    Action <CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
                    if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
                    {
                        applyFunc = ApplyLivePhotoFilter;
                    }
                    else if (Asset.MediaType == PHAssetMediaType.Image)
                    {
                        applyFunc = ApplyPhotoFilter;
                    }
                    else
                    {
                        applyFunc = ApplyVideoFilter;
                    }

                    // Apply the filter.
                    applyFunc(filter, input, output, () =>
                    {
                        // When rendering is done, commit the edit to the Photos library.
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                        {
                            var request = PHAssetChangeRequest.ChangeRequest(Asset);
                            request.ContentEditingOutput = output;
                        }, (success, error) =>
                        {
                            if (!success)
                            {
                                Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                            }
                        });
                    });
                });
            });
        }
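As the NOTE inside ApplyFilter explains, the adjustment data is written only for future consumers. A minimal sketch, assuming the same formatIdentifier and formatVersion fields used above, of how a later session could recover the stored filter name from that payload:

        // Sketch: read back the filter name that ApplyFilter stored via
        // NSData.FromString(filter.Name, NSStringEncoding.UTF8).
        string ReadStoredFilterName(PHAdjustmentData adjustmentData)
        {
            if (adjustmentData == null ||
                adjustmentData.FormatIdentifier != formatIdentifier ||
                adjustmentData.FormatVersion != formatVersion)
            {
                return null;
            }

            return (string)NSString.FromData(adjustmentData.Data, NSStringEncoding.UTF8);
        }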

        void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load the full size image.
            var inputImage = CIImage.FromUrl(input.FullSizeImageUrl);

            if (inputImage == null)
            {
                throw new InvalidProgramException("can't load input image to edit");
            }

            // Apply the filter.
            filter.Image = inputImage.CreateWithOrientation(input.FullSizeImageOrientation);
            var outputImage = filter.OutputImage;

            // Write the edited image as a JPEG.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
            NSError error;

            if (!ciContext.WriteJpegRepresentation(outputImage, output.RenderedContentUrl, inputImage.ColorSpace, new NSDictionary(), out error))
            {
                throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
            }

            completion();
        }

        void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // This app filters assets only for output. In an app that previews
            // filters while editing, create a livePhotoContext early and reuse it
            // to render both for previewing and for final output.
            var livePhotoContext = new PHLivePhotoEditingContext(input);

            livePhotoContext.FrameProcessor2 = (IPHLivePhotoFrame frame, ref NSError _) =>
            {
                filter.Image = frame.Image;
                return(filter.OutputImage);
            };
            livePhotoContext.SaveLivePhoto(output, (PHLivePhotoEditingOption)null, (success, error) =>
            {
                if (success)
                {
                    completion();
                }
                else
                {
                    Console.WriteLine("can't output live photo");
                }
            });
            // Applying edits to a Live Photo currently crashes
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=58227
        }

        void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load AVAsset to process from input.
            var avAsset = input.AudiovisualAsset;

            if (avAsset == null)
            {
                throw new InvalidProgramException("can't get AV asset to edit");
            }

            // Set up a video composition to apply the filter.
            var composition = AVVideoComposition.CreateVideoComposition(avAsset, request =>
            {
                filter.Image = request.SourceImage;
                var filtered = filter.OutputImage;
                request.Finish(filtered, null);
            });

            // Export the video composition to the output URL.
            var export = new AVAssetExportSession(avAsset, AVAssetExportSessionPreset.HighestQuality)
            {
                OutputFileType   = AVFileType.QuickTimeMovie,
                OutputUrl        = output.RenderedContentUrl,
                VideoComposition = composition
            };

            export.ExportAsynchronously(completion);
        }
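Note that ExportAsynchronously above invokes the completion whether or not the export succeeded, so the change request in ApplyFilter is committed even after a failed export. A hedged sketch of gating the completion on the export status instead (the rest of ApplyVideoFilter is unchanged):

        // Sketch: only commit the edit once the export actually completed.
        export.ExportAsynchronously(() => {
            if (export.Status == AVAssetExportSessionStatus.Completed)
                completion();
            else
                Console.WriteLine("video export failed: {0}", export.Error?.LocalizedDescription);
        });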
 public bool CanHandleAdjustmentData(PHAdjustmentData adjustmentData)
 {
     // Inspect the adjustmentData to determine whether your extension can work with past edits.
     // (Typically, you use its formatIdentifier and formatVersion properties to do this.)
     return(false);
 }