void ApplyNoirFilter(object sender, EventArgs e)
        {
            // Request editing input for the asset, apply a CIPhotoEffectNoir
            // filter, preview the result, and publish it to the photo library.
            Asset.RequestContentEditingInput(new PHContentEditingInputRequestOptions(), (input, options) => {
                // perform the editing operation, which applies a noir filter in this case
                var image = CIImage.FromUrl(input.FullSizeImageUrl);
                image     = image.CreateWithOrientation((CIImageOrientation)input.FullSizeImageOrientation);
                var noir  = new CIPhotoEffectNoir {
                    Image = image
                };
                var ciContext = CIContext.FromOptions(null);
                var output    = noir.OutputImage;

                // Render the filtered image and show it in the UI immediately.
                var uiImage     = UIImage.FromImage(ciContext.CreateCGImage(output, output.Extent));
                imageView.Image = uiImage;

                // save the filtered image data to a PHContentEditingOutput instance
                var editingOutput  = new PHContentEditingOutput(input);
                var adjustmentData = new PHAdjustmentData();
                var data           = uiImage.AsJPEG();
                NSError error;
                if (!data.Save(editingOutput.RenderedContentUrl, false, out error)) {
                    // BUG FIX: the save result was previously ignored, so a change
                    // request could be published with no rendered content on disk.
                    Console.WriteLine("could not save filtered image: {0}", error);
                    return;
                }
                editingOutput.AdjustmentData = adjustmentData;

                // make a change request to publish the changes from the editing output
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
                    () => {
                    PHAssetChangeRequest request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = editingOutput;
                },
                    (ok, err) => Console.WriteLine("photo updated successfully: {0}", ok));
            });
        }
        Task FinishPhotoEditing(Action <PHContentEditingOutput> completionHandler)
        {
            // Render the edited photo as a JPEG into the editing output's
            // rendered-content location, then hand the output (or null on
            // failure) to the completion handler.
            PHContentEditingOutput editingOutput = CreateOutput();

            // Location and orientation of the asset's full-size image.
            NSUrl fullSizeUrl = contentEditingInput.FullSizeImageUrl;
            CIImageOrientation imageOrientation = contentEditingInput.FullSizeImageOrientation;

            // Generate rendered JPEG data, disposing every intermediate object.
            using (UIImage sourceImage = UIImage.FromFile(fullSizeUrl.Path))
            using (UIImage renderedImage = TransformImage(sourceImage, imageOrientation))
            using (NSData jpegData = renderedImage.AsJPEG(0.9f)) {
                // Write the JPEG atomically to the editing output's URL.
                NSError saveError = null;
                bool saved = jpegData.Save(editingOutput.RenderedContentUrl, NSDataWritingOptions.Atomic, out saveError);

                if (!saved)
                {
                    Console.WriteLine("An error occured: {0}", saveError);
                }

                completionHandler(saved ? editingOutput : null);
                return(Task.FromResult <object> (null));                       // inform that we may safely clean up any data
            }
        }
        void ApplyNoirFilter(object sender, EventArgs e)
        {
            // Apply a noir filter to the asset, preview it, and publish the
            // edit to the shared photo library.
            Asset.RequestContentEditingInput (new PHContentEditingInputRequestOptions (), (input, options) => {

                // perform the editing operation, which applies a noir filter in this case
                var image = CIImage.FromUrl (input.FullSizeImageUrl);
                image = image.CreateWithOrientation ((CIImageOrientation)input.FullSizeImageOrientation);
                var noir = new CIPhotoEffectNoir {
                    Image = image
                };
                var ciContext = CIContext.FromOptions (null);
                var output = noir.OutputImage;

                var uiImage = UIImage.FromImage (ciContext.CreateCGImage (output, output.Extent));
                imageView.Image = uiImage;

                // save the filtered image data to a PHContentEditingOutput instance
                var editingOutput = new PHContentEditingOutput (input);
                var adjustmentData = new PHAdjustmentData ();
                var data = uiImage.AsJPEG ();
                NSError error;
                data.Save (editingOutput.RenderedContentUrl, false, out error);
                editingOutput.AdjustmentData = adjustmentData;

                // make a change request to publish the changes from the editing output
                // BUG FIX: the shared library is exposed as the SharedPhotoLibrary
                // property (as used by the other blocks in this file);
                // "GetSharedPhotoLibrary" is not a member and does not compile.
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (
                    () => {
                        PHAssetChangeRequest request = PHAssetChangeRequest.ChangeRequest (Asset);
                        request.ContentEditingOutput = editingOutput;
                    },
                    (ok, err) => Console.WriteLine ("photo updated successfully: {0}", ok));
            });
        }
Example #4
0
        void OnApplyFilter(object sender, EventArgs e)
        {
            // Apply a noir filter to the asset, show the result, and publish
            // the edit back to the photo library.
            Asset.RequestContentEditingInput(new PHContentEditingInputRequestOptions(),
                                             (input, options) => {
                var image = CIImage.FromUrl(input.FullSizeImageUrl);
                image     = image.CreateWithOrientation(input.FullSizeImageOrientation);

                var updatedPhoto = new CIPhotoEffectNoir {
                    Image = image
                };
                var ciContext = CIContext.FromOptions(null);
                var output    = updatedPhoto.OutputImage;

                // Get the updated image
                var uiImage    = UIImage.FromImage(ciContext.CreateCGImage(output, output.Extent));
                TheImage.Image = uiImage;

                // Save the image data to a PHContentEditingOutput instance
                var editingOutput = new PHContentEditingOutput(input);

                NSError error;
                var data = uiImage.AsJPEG();
                if (!data.Save(editingOutput.RenderedContentUrl, false, out error))
                {
                    // BUG FIX: the save result was previously ignored; don't publish
                    // a change request when no rendered content was written.
                    Console.WriteLine("Could not save rendered JPEG: {0}", error);
                    return;
                }
                // BUG FIX: removed a stray empty statement (";;") after this assignment.
                editingOutput.AdjustmentData = new PHAdjustmentData();

                // Request to publish the changes from the editing output back to the photo library
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
                    () => {
                    PHAssetChangeRequest request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = editingOutput;
                },
                    (ok, err) => Console.WriteLine("Photo updated : {0}, {1}", ok, err));
            });
        }
Example #5
0
        void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Grab the AVAsset that is being edited.
            var sourceAsset = input.AudiovisualAsset;

            if (sourceAsset == null)
            {
                throw new InvalidProgramException("can't get AV asset to edit");
            }

            // Build a video composition that runs every frame through the filter.
            var filteredComposition = AVVideoComposition.CreateVideoComposition(sourceAsset, request => {
                filter.Image = request.SourceImage;
                request.Finish(filter.OutputImage, null);
            });

            // Export the composition as a QuickTime movie to the editing output's URL.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
            var exportSession = new AVAssetExportSession(sourceAsset, AVAssetExportSession.PresetHighestQuality);
            exportSession.OutputFileType   = AVFileType.QuickTimeMovie;
            exportSession.OutputUrl        = output.RenderedContentUrl;
            exportSession.VideoComposition = filteredComposition;

            exportSession.ExportAsynchronously(completion);
        }
        PHContentEditingOutput CreateOutput()
        {
            // Build an editing output for the current editing input and attach
            // the adjustment data that describes the pending edit.
            var output = new PHContentEditingOutput(contentEditingInput)
            {
                AdjustmentData = CreateAdjustmentData()
            };

            return output;
        }
        void ApplyFilter(CIFilter filter)
        {
            // Apply the given Core Image filter to the asset's full-size image,
            // record adjustment data describing the edit, and commit the
            // rendered JPEG to the shared photo library.

            // Prepare the options to pass when requesting to edit the image.
            var options = new PHContentEditingInputRequestOptions();

            options.SetCanHandleAdjustmentDataHandler(adjustmentData => {
                bool result = false;
                InvokeOnMainThread(() => {
                    // Only resume edits recorded with this app's format id/version.
                    result = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0";
                });

                return(result);
            });

            Asset.RequestContentEditingInput(options, (contentEditingInput, requestStatusInfo) => {
                // Create a CIImage from the full image representation.
                var url         = contentEditingInput.FullSizeImageUrl;
                int orientation = (int)contentEditingInput.FullSizeImageOrientation;
                var inputImage  = CIImage.FromUrl(url);
                inputImage      = inputImage.CreateWithOrientation((CIImageOrientation)orientation);

                // Create the filter to apply.
                filter.SetDefaults();
                filter.Image = inputImage;

                // Apply the filter.
                CIImage outputImage = filter.OutputImage;

                // Create a PHAdjustmentData object that describes the filter that was applied.
                var adjustmentData = new PHAdjustmentData(
                    AdjustmentFormatIdentifier,
                    "1.0",
                    NSData.FromString(filter.Name, NSStringEncoding.UTF8)
                    );

                var contentEditingOutput = new PHContentEditingOutput(contentEditingInput);
                NSData jpegData          = outputImage.GetJpegRepresentation(0.9f);
                // BUG FIX: the boolean result of Save was previously ignored; bail
                // out instead of publishing a change request with no rendered file.
                if (!jpegData.Save(contentEditingOutput.RenderedContentUrl, true))
                {
                    Console.WriteLine("Error: could not write rendered JPEG");
                    return;
                }
                contentEditingOutput.AdjustmentData = adjustmentData;

                // Ask the shared PHPhotoLibrary to perform the changes.
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                    var request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = contentEditingOutput;
                }, (success, error) => {
                    if (!success)
                    {
                        Console.WriteLine("Error: {0}", error.LocalizedDescription);
                    }
                });
            });
        }
        public void FinishContentEditing(Action<PHContentEditingOutput> completionHandler)
        {
            // Editing is done: render the output off the main thread and hand
            // it to Photos via the completion handler.
            NSObject.InvokeInBackground (() => {
                // Build the editing output from the editing input.
                var editingOutput = new PHContentEditingOutput (input);

                // Committing the edit happens through the completion handler.
                completionHandler (editingOutput);

                // Clean up temporary files, etc.
            });
        }
        public void FinishContentEditing(Action <PHContentEditingOutput> completionHandler)
        {
            // Render and deliver the editing output on a background queue so
            // the UI thread is not blocked while Photos commits the edit.
            NSObject.InvokeInBackground(() => {
                // Hand a fresh output, built from the editing input, to Photos.
                var contentEditingOutput = new PHContentEditingOutput(input);
                completionHandler(contentEditingOutput);

                // Clean up temporary files, etc.
            });
        }
        public void FinishContentEditing(Action <PHContentEditingOutput> completionHandler)
        {
            // Create the editing output and record the adjustment data that
            // describes the edit being committed.
            var editingOutput = new PHContentEditingOutput(contentEditingInput);
            editingOutput.AdjustmentData = CreateAdjustmentData();

            // Kick off the media-type specific rendering work.
            var mediaType = contentEditingInput.MediaType;
            Task renderTask;
            if (mediaType == PHAssetMediaType.Image)
            {
                renderTask = FinishPhotoEditing(completionHandler);
            }
            else if (mediaType == PHAssetMediaType.Video)
            {
                renderTask = FinishVideoEditing(completionHandler);
            }
            else
            {
                throw new NotImplementedException();
            }

            // Once rendering has finished, release every cached image and
            // filter resource on the main thread.
            renderTask.ContinueWith(_ => {
                InvokeOnMainThread(() => {
                    initialFilterName = null;

                    TryDisposeContentInputImage();
                    TryDisposeContentInput();

                    inputImage.Dispose();
                    inputImage = null;

                    TryDisposeFilterInput();
                    TryDisposeFilter();

                    BackgroundImageView.Image.Dispose();
                    BackgroundImageView.Image = null;

                    TryDisposeFilterPreviewImg();
                });
            });
        }
        public void FinishContentEditing(Action <PHContentEditingOutput> completionHandler)
        {
            // Editing has finished; render and provide the output on a
            // background queue so the UI stays responsive.
            NSObject.InvokeInBackground(() => {
                // Create editing output from the editing input.
                var editingOutput = new PHContentEditingOutput(input);

                // Provide new adjustments and render output to given location.
                // output.AdjustmentData = <#new adjustment data#>;
                // NSData renderedJPEGData = <#output JPEG#>;
                // renderedJPEGData.Save (output.RenderedContentURL, true);

                // Call completion handler to commit edit to Photos.
                completionHandler(editingOutput);

                // Clean up temporary files, etc.
            });
        }
Example #12
0
        void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Read the full-resolution image from the editing input.
            var sourceImage = new CIImage(input.FullSizeImageUrl);

            // Orient the image, then run it through the filter.
            filter.Image = sourceImage.CreateWithOrientation(input.FullSizeImageOrientation);
            var filteredImage = filter.OutputImage;

            // Write the edited image as a JPEG.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
            NSError writeError;
            bool wrote = ciContext.WriteJpegRepresentation(filteredImage, output.RenderedContentUrl, sourceImage.ColorSpace(), new NSDictionary(), out writeError);

            if (!wrote)
            {
                throw new InvalidProgramException($"can't apply filter to image: {writeError.LocalizedDescription}");
            }

            completion();
        }
        public void FinishContentEditing(Action<PHContentEditingOutput> completionHandler)
        {
            // Rendering happens on a background queue; the completion handler
            // commits the resulting output to Photos.
            NSObject.InvokeInBackground (() => {
                // Build the editing output from the editing input.
                var editingOutput = new PHContentEditingOutput (input);

                // Provide new adjustments and render output to given location.
                // output.AdjustmentData = <#new adjustment data#>;
                // NSData renderedJPEGData = <#output JPEG#>;
                // renderedJPEGData.Save (output.RenderedContentURL, true);

                // Call completion handler to commit edit to Photos.
                completionHandler (editingOutput);

                // Clean up temporary files, etc.
            });
        }
        Task FinishVideoEditing(Action <PHContentEditingOutput> completionHandler)
        {
            // Render the filtered video into the editing output's content URL
            // and report the output (or null on failure) to the handler. The
            // returned task completes once it is safe to clean up any data.
            PHContentEditingOutput contentEditingOutput = CreateOutput();
            AVReaderWriter         avReaderWriter       = new AVReaderWriter(contentEditingInput.AvAsset, this);

            // BUG FIX: run continuations asynchronously so SetResult cannot
            // execute downstream continuations inline on the AVReaderWriter
            // callback thread (best practice for TaskCompletionSource).
            var tcs = new TaskCompletionSource <object> (TaskCreationOptions.RunContinuationsAsynchronously);

            // Save filtered video
            avReaderWriter.WriteToUrl(contentEditingOutput.RenderedContentUrl, error => {
                bool success = error == null;
                PHContentEditingOutput output = success ? contentEditingOutput : null;
                if (!success)
                {
                    Console.WriteLine("An error occured: {0}", error);
                }
                completionHandler(output);
                tcs.SetResult(null);                  // inform that we may safely clean up any data
            });

            return(tcs.Task);
        }
Example #15
0
        void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // This app filters assets only for output. In an app that previews
            // filters while editing, create a livePhotoContext early and reuse it
            // to render both for previewing and for final output.
            var livePhotoContext = new PHLivePhotoEditingContext(input);

            // Run every frame of the Live Photo through the filter.
            livePhotoContext.FrameProcessor = (frame, _) => {
                filter.Image = frame.Image;
                return(filter.OutputImage);
            };
            livePhotoContext.SaveLivePhoto(output, null, (success, error) => {
                if (success)
                {
                    completion();
                }
                else
                {
                    // BUG FIX: the NSError was previously swallowed; include it so
                    // failures are diagnosable.
                    Console.WriteLine("can't output live photo: {0}", error);
                }
            });
        }
        public void FinishContentEditing(Action <PHContentEditingOutput> completionHandler)
        {
            // Commit the selected filter: record adjustment data, render the
            // edited media into the editing output, and report the output
            // (or null on failure) through the completion handler.
            PHContentEditingOutput contentEditingOutput = new PHContentEditingOutput(contentEditingInput);

            // Adjustment data: archive the selected filter name so the edit
            // can be recognized later.
            NSData           archivedData   = NSKeyedArchiver.ArchivedDataWithRootObject((NSString)selectedFilterName);
            PHAdjustmentData adjustmentData = new PHAdjustmentData("com.your-company.PhotoFilter", "1.0",
                                                                   archivedData);

            contentEditingOutput.AdjustmentData = adjustmentData;

            switch (contentEditingInput.MediaType)
            {
            case PHAssetMediaType.Image:
            {
                // Get full size image
                NSUrl url = contentEditingInput.FullSizeImageUrl;
                CIImageOrientation orientation = contentEditingInput.FullSizeImageOrientation;

                // Generate rendered JPEG data.
                // BUG FIX: dispose the intermediate images and the JPEG buffer;
                // they were previously leaked (the using-based FinishPhotoEditing
                // variant in this file disposes them).
                using (UIImage image = UIImage.FromFile(url.Path))
                using (UIImage transformedImage = TransformeImage(image, orientation))
                using (NSData renderedJPEGData = transformedImage.AsJPEG(0.9f))
                {
                    // Save JPEG data
                    NSError error   = null;
                    bool    success = renderedJPEGData.Save(contentEditingOutput.RenderedContentUrl, NSDataWritingOptions.Atomic, out error);
                    if (success)
                    {
                        completionHandler(contentEditingOutput);
                    }
                    else
                    {
                        Console.WriteLine("An error occured: {0}", error);
                        completionHandler(null);
                    }
                }
                break;
            }

            case PHAssetMediaType.Video:
            {
                // Get AV asset
                AVReaderWriter avReaderWriter = new AVReaderWriter(contentEditingInput.AvAsset);
                avReaderWriter.Delegate = this;

                // Save filtered video
                avReaderWriter.WriteToUrl(contentEditingOutput.RenderedContentUrl,
                                          progress => {
                    },
                                          error => {
                        if (error == null)
                        {
                            completionHandler(contentEditingOutput);
                            return;
                        }
                        Console.WriteLine("An error occured: {0}", error);
                        completionHandler(null);
                    });
                break;
            }

            default:
                break;
            }
        }
        public void FinishContentEditing(Action<PHContentEditingOutput> completionHandler)
        {
            // Build the output for the pending edit and attach its adjustment data.
            var output = new PHContentEditingOutput (contentEditingInput);
            output.AdjustmentData = CreateAdjustmentData ();

            // Dispatch to the media-type specific rendering routine.
            Task assetWork;
            switch (contentEditingInput.MediaType) {
                case PHAssetMediaType.Image:
                    assetWork = FinishPhotoEditing (completionHandler);
                    break;

                case PHAssetMediaType.Video:
                    assetWork = FinishVideoEditing (completionHandler);
                    break;

                default:
                    throw new NotImplementedException ();
            }

            // After rendering completes, drop cached inputs, filters and
            // preview images on the main thread.
            assetWork.ContinueWith (_ => {
                InvokeOnMainThread(()=> {
                    initialFilterName = null;

                    TryDisposeContentInputImage();
                    TryDisposeContentInput();

                    inputImage.Dispose ();
                    inputImage = null;

                    TryDisposeFilterInput ();
                    TryDisposeFilter ();

                    BackgroundImageView.Image.Dispose ();
                    BackgroundImageView.Image = null;

                    TryDisposeFilterPreviewImg ();
                });
            });
        }
        PHContentEditingOutput CreateOutput()
        {
            // Fresh editing output for the current input, tagged with the
            // adjustment data that describes the edit.
            return new PHContentEditingOutput (contentEditingInput) {
                AdjustmentData = CreateAdjustmentData ()
            };
        }
		void ApplyFilter (CIFilter filter)
		{
			// Apply `filter` to Asset (photo, live photo, or video), record
			// adjustment data for the edit, and commit the rendered result to
			// the shared photo library.

			// Set up a handler to make sure we can handle prior edits.
			var options = new PHContentEditingInputRequestOptions ();
			options.CanHandleAdjustmentData = (adjustmentData => {
				// Resume only edits this app wrote with the expected format id/version.
				return adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion;
			});

			// Prepare for editing.
			Asset.RequestContentEditingInput (options, (input, requestStatusInfo) => {
				if (input == null)
					throw new InvalidProgramException ($"can't get content editing input: {requestStatusInfo}");

				// This handler gets called on the main thread; dispatch to a background queue for processing.
				DispatchQueue.GetGlobalQueue (DispatchQueuePriority.Default).DispatchAsync (() => {
					// Create a PHAdjustmentData object that describes the filter that was applied.
					var adjustmentData = new PHAdjustmentData (
					formatIdentifier,
					formatVersion,
					NSData.FromString (filter.Name, NSStringEncoding.UTF8));

					// NOTE:
					// This app's filter UI is fire-and-forget. That is, the user picks a filter, 
					// and the app applies it and outputs the saved asset immediately. There's 
					// no UI state for having chosen but not yet committed an edit. This means
					// there's no role for reading adjustment data -- you do that to resume
					// in-progress edits, and this sample app has no notion of "in-progress".
					//
					// However, it's still good to write adjustment data so that potential future
					// versions of the app (or other apps that understand our adjustment data
					// format) could make use of it.

					// Create content editing output, write the adjustment data.
					var output = new PHContentEditingOutput (input) {
						AdjustmentData = adjustmentData
					};

					// Select a filtering function for the asset's media type.
					Action<CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
					if (Asset.MediaSubtypes.HasFlag (PHAssetMediaSubtype.PhotoLive))
						applyFunc = ApplyLivePhotoFilter;
					else if (Asset.MediaType == PHAssetMediaType.Image)
						applyFunc = ApplyPhotoFilter;
					else
						applyFunc = ApplyVideoFilter;

					// Apply the filter.
					applyFunc (filter, input, output, () => {
						// When rendering is done, commit the edit to the Photos library.
						PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
							var request = PHAssetChangeRequest.ChangeRequest (Asset);
							request.ContentEditingOutput = output;
						}, (success, error) => {
							if (!success)
								Console.WriteLine ($"can't edit asset: {error.LocalizedDescription}");
						});
					});
				});
			});
		}
		public void FinishContentEditing (Action<PHContentEditingOutput> completionHandler)
		{
			// Render the current edit (photo or video) into a new
			// PHContentEditingOutput and report it via completionHandler;
			// the handler receives null when rendering/saving fails.
			PHContentEditingOutput contentEditingOutput = new PHContentEditingOutput (contentEditingInput);

			// Adjustment data: archive the selected filter name so the edit can be identified later.
			NSData archivedData = NSKeyedArchiver.ArchivedDataWithRootObject ((NSString)selectedFilterName);
			PHAdjustmentData adjustmentData = new PHAdjustmentData ("com.your-company.PhotoFilter", "1.0",
				                                  archivedData);
			contentEditingOutput.AdjustmentData = adjustmentData;

			switch (contentEditingInput.MediaType) {
			case PHAssetMediaType.Image:
				{
					// Get full size image
					NSUrl url = contentEditingInput.FullSizeImageUrl;
					CIImageOrientation orientation = contentEditingInput.FullSizeImageOrientation;

					// Generate rendered JPEG data
					// NOTE(review): the intermediate UIImage/NSData instances are not
					// disposed here (compare the using-based FinishPhotoEditing
					// variant in this file) — potential native-memory pressure.
					UIImage image = UIImage.FromFile (url.Path);
					image = TransformeImage (image, orientation);
					NSData renderedJPEGData = image.AsJPEG (0.9f);

					// Save JPEG data
					NSError error = null;
					bool success = renderedJPEGData.Save (contentEditingOutput.RenderedContentUrl, NSDataWritingOptions.Atomic, out error);
					if (success) {
						completionHandler (contentEditingOutput);
					} else {
						Console.WriteLine ("An error occured: {0}", error);
						completionHandler (null);
					}
					break;
				}

			case PHAssetMediaType.Video:
				{
					// Get AV asset
					AVReaderWriter avReaderWriter = new AVReaderWriter (contentEditingInput.AvAsset);
					avReaderWriter.Delegate = this;

					// Save filtered video; progress updates are intentionally ignored.
					avReaderWriter.WriteToUrl (contentEditingOutput.RenderedContentUrl,
						progress => {
						},
						error => {
							if (error == null) {
								completionHandler (contentEditingOutput);
								return;
							}
							Console.WriteLine ("An error occured: {0}", error);
							completionHandler (null);
						});
					break;
				}

			default:
				// NOTE(review): unsupported media types fall through silently and
				// completionHandler is never invoked — confirm callers tolerate this.
				break;
			}
		}
		void ApplyPhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// Run `input`'s full-size image through `filter`, write the result
			// as a JPEG to `output`'s rendered-content URL, then invoke
			// `completion`. Throws InvalidProgramException when the write fails.

			// Load the full size image.
			var inputImage = new CIImage (input.FullSizeImageUrl);

			// Apply the filter.
			filter.Image = inputImage.CreateWithOrientation (input.FullSizeImageOrientation);
			var outputImage = filter.OutputImage;

			// Write the edited image as a JPEG.
			// TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
			NSError error;
			if (!ciContext.WriteJpegRepresentation (outputImage, output.RenderedContentUrl, inputImage.ColorSpace (), new NSDictionary(), out error))
				throw new InvalidProgramException ($"can't apply filter to image: {error.LocalizedDescription}");

			completion ();
		}
Example #22
0
        void ApplyFilter(CIFilter filter)
        {
            // Apply `filter` to Asset (photo, live photo, or video), record
            // adjustment data for the edit, and commit the rendered result to
            // the shared photo library.

            // Set up a handler to make sure we can handle prior edits.
            var options = new PHContentEditingInputRequestOptions();

            options.CanHandleAdjustmentData = (adjustmentData => {
                // Resume only edits this app wrote with the expected format id/version.
                return(adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion);
            });

            // Prepare for editing.
            Asset.RequestContentEditingInput(options, (input, requestStatusInfo) => {
                if (input == null)
                {
                    throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
                }

                // This handler gets called on the main thread; dispatch to a background queue for processing.
                DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() => {
                    // Create a PHAdjustmentData object that describes the filter that was applied.
                    var adjustmentData = new PHAdjustmentData(
                        formatIdentifier,
                        formatVersion,
                        NSData.FromString(filter.Name, NSStringEncoding.UTF8));

                    // NOTE:
                    // This app's filter UI is fire-and-forget. That is, the user picks a filter,
                    // and the app applies it and outputs the saved asset immediately. There's
                    // no UI state for having chosen but not yet committed an edit. This means
                    // there's no role for reading adjustment data -- you do that to resume
                    // in-progress edits, and this sample app has no notion of "in-progress".
                    //
                    // However, it's still good to write adjustment data so that potential future
                    // versions of the app (or other apps that understand our adjustment data
                    // format) could make use of it.

                    // Create content editing output, write the adjustment data.
                    var output = new PHContentEditingOutput(input)
                    {
                        AdjustmentData = adjustmentData
                    };

                    // Select a filtering function for the asset's media type.
                    Action <CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
                    if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
                    {
                        applyFunc = ApplyLivePhotoFilter;
                    }
                    else if (Asset.MediaType == PHAssetMediaType.Image)
                    {
                        applyFunc = ApplyPhotoFilter;
                    }
                    else
                    {
                        applyFunc = ApplyVideoFilter;
                    }

                    // Apply the filter.
                    applyFunc(filter, input, output, () => {
                        // When rendering is done, commit the edit to the Photos library.
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                            var request = PHAssetChangeRequest.ChangeRequest(Asset);
                            request.ContentEditingOutput = output;
                        }, (success, error) => {
                            if (!success)
                            {
                                Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                            }
                        });
                    });
                });
            });
        }
Example #23
0
		// This function checks whether the asset for the given localIdentifier is already in the bugTrap album.
		// If it is, it updates the existing asset and resolves null through the callback; otherwise a new (duplicate)
		// asset is created and the localIdentifier of the newly created asset is returned by the callback.

		public async Task<string> UpdateAsset (UIImage updatedSnapshot, string localIdentifier)
		{
			var tcs = new TaskCompletionSource<string> ();

			var collectionLocalIdentifier = await GetAlbumLocalIdentifier();

			// No album identifier available at all: bail out early.
			if (string.IsNullOrEmpty(collectionLocalIdentifier)) return null;

			// get the bugTrap album
			var bugTrapAlbum = PHAssetCollection.FetchAssetCollections(new [] { collectionLocalIdentifier }, null)?.firstObject as PHAssetCollection;
			if (bugTrapAlbum != null) {

				// if we passed in null for the localIdentifier, we just want to save the image and
				// get a new identifier (most likely being used as the sdk)
				if (string.IsNullOrEmpty(localIdentifier)) {
					return await SaveAsset(updatedSnapshot, bugTrapAlbum, tcs);
				}

				// get the asset for the localIdentifier
				var asset = PHAsset.FetchAssetsUsingLocalIdentifiers(new [] { localIdentifier }, null)?.firstObject as PHAsset;
				if (asset != null) {

					// get all the albums containing this asset
					var containingAssetResults = PHAssetCollection.FetchAssetCollections(asset, PHAssetCollectionType.Album, null);
					if (containingAssetResults != null) {

						// check if any of the albums is the bugTrap album.  if the asset is already in the bugTrap album, update the existing asset.
						if (containingAssetResults.Contains(bugTrapAlbum)) {

							// retrieve a PHContentEditingInput object
							// NOTE(review): this lambda is marked async but contains no await;
							// `tcs` is only resolved inside the PerformChanges completion below.
							asset.RequestContentEditingInput(null, async (input, info) => 

								PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
								
								// Build an editing output carrying bugTrap adjustment data.
								var editAssetOutput = new PHContentEditingOutput (input);
								editAssetOutput.AdjustmentData = new PHAdjustmentData ("io.bugtrap.bugTrap", "1.0.0", NSData.FromString("io.bugtrap.bugTrap"));
								
								// Render the updated snapshot as full-quality JPEG into the output.
								var editAssetJpegData = updatedSnapshot.AsJPEG(1);
								editAssetJpegData.Save(editAssetOutput.RenderedContentUrl, true);

								var editAssetRequest = PHAssetChangeRequest.ChangeRequest(asset);
								editAssetRequest.ContentEditingOutput = editAssetOutput;

							}, (success, error) => {

								if (success) {

									// NOTE(review): TrySetResult(null) should normally succeed; the
									// TrySetException fallback only runs if tcs already completed,
									// in which case it also fails — confirm this logic is intended.
									if (!tcs.TrySetResult(null)) {
										var ex = new Exception ("UpdateAsset Failed");
										tcs.TrySetException(ex);
										// Log.Error(ex);
									}

								} else if (error != null) {
									// Log.Error("Photos", error);
									if (!tcs.TrySetResult(null)) {
										var ex = new Exception (error.LocalizedDescription);
										tcs.TrySetException(ex);
										// Log.Error(ex);
									}
								}
							}));

						} else {// if the asset isn't in the bugTrap album, create a new asset and put it in the album, and leave the original unchanged

							return await SaveAsset(updatedSnapshot, bugTrapAlbum, tcs);

//							string newLocalIdentifier = null;
//
//							PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
//
//								// create a new asset from the UIImage updatedSnapshot
//								var createAssetRequest = PHAssetChangeRequest.FromImage(updatedSnapshot);
//
//								// create a request to make changes to the bugTrap album
//								var collectionRequest = PHAssetCollectionChangeRequest.ChangeRequest(bugTrapAlbum);
//
//								// get a reference to the new localIdentifier
//								newLocalIdentifier = createAssetRequest.PlaceholderForCreatedAsset.LocalIdentifier;
//
//								// add the newly created asset to the bugTrap album
//								collectionRequest.AddAssets(new [] { createAssetRequest.PlaceholderForCreatedAsset });
//
//							}, (success, error) => {
//
//								if (success) {
//
//									if (!tcs.TrySetResult(newLocalIdentifier)) {
//										var ex = new Exception ("UpdateAsset Failed");
//										tcs.TrySetException(ex);
//										// Log.Error(ex);
//									}
//
//								} else if (error != null) {
//									// Log.Error("Photos", error);
//									if (!tcs.TrySetResult(null)) {
//										var ex = new Exception (error.LocalizedDescription);
//										tcs.TrySetException(ex);
//										// Log.Error(ex);
//									}
//								}
//							});
						}
					}
				}
			} else { // couldn't find the bugTrap album - user probably deleted it while the app was open

				albumLocalIdentifier = null;

				var identifier = await createAlbumAndSaveLocalIdentifier();

				if (!string.IsNullOrEmpty(identifier)) return await UpdateAsset(updatedSnapshot, localIdentifier);

				if (!tcs.TrySetResult(null)) {
					var ex = new Exception ("UpdateAsset Failed");
					tcs.TrySetException(ex);
					// Log.Error(ex);
				}
			}

			// NOTE(review): when the asset fetch or containingAssetResults comes back
			// null, no code path ever completes `tcs`, so this await never finishes —
			// confirm callers cannot hit those paths, or add a completion there.
			return await tcs.Task;
		}
        void UpdateToolbars()
        {
            // Refresh the toolbar / navigation-bar buttons to reflect which
            // operations the current asset (and its containing collection) supports.

            // Enable editing buttons if the asset can be edited.
            EditButton.Enabled     = Asset.CanPerformEditOperation(PHAssetEditOperation.Content);
            FavoriteButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Properties);
            FavoriteButton.Title   = Asset.Favorite ? "♥︎" : "♡";

            // Enable the trash button if the asset can be deleted — either removed
            // from the collection it was reached through, or deleted outright.
            if (AssetCollection != null)
            {
                TrashButton.Enabled = AssetCollection.CanPerformEditOperation(PHCollectionEditOperation.RemoveContent);
            }
            else
            {
                TrashButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Delete);
            }

            // Set the appropriate toolbarItems based on the mediaType of the asset.
            if (Asset.MediaType == PHAssetMediaType.Video)
            {
#if __TVOS__
                NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { PlayButton, FavoriteButton, TrashButton };
#elif __IOS__
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, PlayButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
            else
            {
#if __TVOS__
                // In tvOS, PHLivePhotoView doesn't do playback gestures,
                // so add a play button for Live Photos.
                // BUGFIX: the statement terminators were misplaced here (missing ';'
                // after each array assignment and a stray ';' between the 'if' and
                // 'else' blocks), which failed to compile under the __TVOS__ define.
                if (Asset.PlaybackStyle == PHAssetPlaybackStyle.LivePhoto)
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { LivePhotoPlayButton, FavoriteButton, TrashButton };
                }
                else
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { FavoriteButton, TrashButton };
                }
#elif __IOS__
                // In iOS, present both stills and Live Photos the same way, because
                // PHLivePhotoView provides the same gesture-based UI as in Photos app.
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
        }

        void UpdateStillImage()
        {
            // Configure the image request: best quality, allow network (iCloud)
            // download, and surface download progress in the progress view.
            var requestOptions = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // The progress handler may run off the main queue; hop back before touching UI.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageForAsset(Asset, GetTargetSize(), PHImageContentMode.AspectFit, requestOptions, (image, info) =>
            {
                // The request has completed; stop showing progress.
                ProgressView.Hidden = true;

                if (image == null)
                {
                    return;
                }

                // Display the fetched image.
                ImageView.Hidden = false;
                ImageView.Image  = image;
            });
        }

        void UpdateLivePhoto()
        {
            // Configure the live-photo request: best quality, allow iCloud
            // download, and report download progress to the UI.
            var requestOptions = new PHLivePhotoRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary dictionary) =>
                {
                    stop = false;
                    // Progress callbacks may arrive off the main queue; re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() =>
                    {
                        ProgressView.Progress = (float)progress;
                    });
                }
            };

            ProgressView.Hidden = false;
            // Fetch the live photo for the asset from the default PHImageManager.
            PHImageManager.DefaultManager.RequestLivePhoto(Asset, GetTargetSize(), PHImageContentMode.AspectFit, requestOptions, (livePhoto, info) =>
            {
                // The request has completed; stop showing progress.
                ProgressView.Hidden = true;

                if (livePhoto == null)
                {
                    return;
                }

                // Swap the visible view over to the live photo view and attach the result.
                ImageView.Hidden         = true;
                AnimatedImageView.Hidden = true;
                LivePhotoView.Hidden     = false;
                LivePhotoView.LivePhoto  = livePhoto;

                // Play a short hint once, similar to the Photos share sheet.
                if (!isPlayingHint)
                {
                    isPlayingHint = true;
                    LivePhotoView.StartPlayback(PHLivePhotoViewPlaybackStyle.Hint);
                }
            });
        }

        void UpdateAnimatedImage()
        {
            // Request the *original* image data (the edited/current version may not
            // retain animation), allowing network download with progress reporting.
            var requestOptions = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                Version              = PHImageRequestOptionsVersion.Original,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // The handler is not guaranteed to run on the main queue; re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageData(Asset, requestOptions, (data, dataUti, orientation, info) =>
            {
                // The request has completed; stop showing progress.
                ProgressView.Hidden = true;

                if (data == null)
                {
                    return;
                }

                // Wrap the raw data and hand it to the animated image view for playback.
                var animated = new AnimatedImage(data);

                LivePhotoView.Hidden            = true;
                ImageView.Hidden                = true;
                AnimatedImageView.Hidden        = false;
                AnimatedImageView.AnimatedImage = animated;
                AnimatedImageView.IsPlaying     = true;
            });
        }

        #endregion

        #region Asset editing

        void RevertAsset(UIAlertAction action)
        {
            // Ask the shared photo library to discard all edits on the asset,
            // restoring the original content. Failures are logged, not surfaced.
            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
                () => PHAssetChangeRequest.ChangeRequest(Asset).RevertAssetContentToOriginal(),
                (success, error) =>
                {
                    if (!success)
                    {
                        Console.WriteLine($"can't revert asset: {error.LocalizedDescription}");
                    }
                });
        }

        void ApplyFilter(CIFilter filter)
        {
            // Only resume from prior edits whose adjustment data matches our own format.
            var options = new PHContentEditingInputRequestOptions
            {
                CanHandleAdjustmentData = adjustmentData =>
                    adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion
            };

            // Begin the edit by requesting content editing input for the asset.
            Asset.RequestContentEditingInput(options, (input, requestStatusInfo) =>
            {
                if (input == null)
                {
                    throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
                }

                // This handler runs on the main thread; move the processing to a background queue.
                DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() =>
                {
                    // Record which filter was applied so future editors can recognize the edit.
                    var adjustmentData = new PHAdjustmentData(
                        formatIdentifier,
                        formatVersion,
                        NSData.FromString(filter.Name, NSStringEncoding.UTF8));

                    // NOTE:
                    // This app's filter UI is fire-and-forget: the user picks a filter
                    // and the app renders and commits the edit immediately, so there is
                    // no "in-progress" edit state and hence no need to *read* adjustment
                    // data (reading is for resuming edits). Writing it is still worthwhile
                    // so future versions of this app — or other apps that understand this
                    // adjustment data format — can make use of it.

                    // Create content editing output carrying the adjustment data.
                    var output = new PHContentEditingOutput(input)
                    {
                        AdjustmentData = adjustmentData
                    };

                    // Pick the rendering routine that matches the asset's media type.
                    Action<CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> render;
                    if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
                    {
                        render = ApplyLivePhotoFilter;
                    }
                    else if (Asset.MediaType == PHAssetMediaType.Image)
                    {
                        render = ApplyPhotoFilter;
                    }
                    else
                    {
                        render = ApplyVideoFilter;
                    }

                    // Apply the filter, then commit the edit once rendering completes.
                    render(filter, input, output, () =>
                    {
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                        {
                            PHAssetChangeRequest.ChangeRequest(Asset).ContentEditingOutput = output;
                        }, (success, error) =>
                        {
                            if (!success)
                            {
                                Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                            }
                        });
                    });
                });
            });
        }

        void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Renders `filter` over the asset's full-size still image and writes
            // the result as a JPEG to the editing output's rendered-content URL.
            // Invokes `completion` only on success; throws on any failure.

            // Load the full size image. Use the factory method rather than the
            // constructor: CIImage.FromUrl returns null when the image can't be
            // loaded, whereas a constructor result is never null in C# — which
            // made the null guard below unreachable dead code.
            var inputImage = CIImage.FromUrl(input.FullSizeImageUrl);

            if (inputImage == null)
            {
                throw new InvalidProgramException("can't load input image to edit");
            }

            // Apply the filter, honoring the original image's orientation.
            filter.Image = inputImage.CreateWithOrientation(input.FullSizeImageOrientation);
            var outputImage = filter.OutputImage;

            // Write the edited image as a JPEG.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
            NSError error;

            if (!ciContext.WriteJpegRepresentation(outputImage, output.RenderedContentUrl, inputImage.ColorSpace, new NSDictionary(), out error))
            {
                throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
            }

            completion();
        }

        void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // This app filters assets only for output. An app that previews filters
            // while editing would create the livePhotoContext early and reuse it to
            // render both the preview and the final output.
            var context = new PHLivePhotoEditingContext(input);

            // Route every frame of the live photo through the supplied filter.
            context.FrameProcessor2 = (IPHLivePhotoFrame frame, ref NSError _) =>
            {
                filter.Image = frame.Image;
                return filter.OutputImage;
            };

            context.SaveLivePhoto(output, (PHLivePhotoEditingOption)null, (success, error) =>
            {
                if (!success)
                {
                    Console.WriteLine("can't output live photo");
                    return;
                }
                completion();
            });
            // Applying edits to a Live Photo currently crashes
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=58227
        }

        void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Grab the AVAsset backing the video to be edited.
            var avAsset = input.AudiovisualAsset;

            if (avAsset == null)
            {
                throw new InvalidProgramException("can't get AV asset to edit");
            }

            // Build a video composition that pushes every source frame through the filter.
            var composition = AVVideoComposition.CreateVideoComposition(avAsset, request =>
            {
                filter.Image = request.SourceImage;
                request.Finish(filter.OutputImage, null);
            });

            // Export the filtered composition to the editing output's URL.
            var exportSession = new AVAssetExportSession(avAsset, AVAssetExportSessionPreset.HighestQuality)
            {
                OutputFileType   = AVFileType.QuickTimeMovie,
                OutputUrl        = output.RenderedContentUrl,
                VideoComposition = composition
            };

            exportSession.ExportAsynchronously(completion);
        }
		void ApplyLivePhotoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// This app only filters for final output. An editor that previews
			// filters would create the editing context early and reuse it for
			// both preview rendering and the final save.
			var context = new PHLivePhotoEditingContext (input);

			// Route each live-photo frame through the supplied filter.
			context.FrameProcessor = (frame, _) => {
				filter.Image = frame.Image;
				return filter.OutputImage;
			};

			context.SaveLivePhoto (output, null, (success, error) => {
				if (!success) {
					Console.WriteLine ("can't output live photo");
					return;
				}
				completion ();
			});
		}
		void ApplyFilter (CIFilter filter)
		{
			// Only resume from prior edits whose adjustment data uses our format and version.
			var options = new PHContentEditingInputRequestOptions ();
			options.SetCanHandleAdjustmentDataHandler (adjustmentData => {
				bool canHandle = false;
				InvokeOnMainThread (() => {
					canHandle = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0";
				});

				return canHandle;
			});

			Asset.RequestContentEditingInput (options, (contentEditingInput, requestStatusInfo) => {
				// Build a CIImage from the full-size representation, honoring orientation.
				var url = contentEditingInput.FullSizeImageUrl;
				var orientation = (CIImageOrientation)(int)contentEditingInput.FullSizeImageOrientation;
				var inputImage = CIImage.FromUrl (url).CreateWithOrientation (orientation);

				// Configure and apply the filter.
				filter.SetDefaults ();
				filter.Image = inputImage;
				CIImage outputImage = filter.OutputImage;

				// Record which filter was applied so the edit can be recognized later.
				var adjustmentData = new PHAdjustmentData (
					AdjustmentFormatIdentifier,
					"1.0",
					NSData.FromString (filter.Name, NSStringEncoding.UTF8)
				);

				// Write the rendered JPEG and adjustment data into the editing output.
				var contentEditingOutput = new PHContentEditingOutput (contentEditingInput);
				NSData jpegData = outputImage.GetJpegRepresentation (0.9f);
				jpegData.Save (contentEditingOutput.RenderedContentUrl, true);
				contentEditingOutput.AdjustmentData = adjustmentData;

				// Ask the shared PHPhotoLibrary to commit the change.
				PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
					var request = PHAssetChangeRequest.ChangeRequest (Asset);
					request.ContentEditingOutput = contentEditingOutput;
				}, (success, error) => {
					if (!success)
						Console.WriteLine ("Error: {0}", error.LocalizedDescription);
				});
			});
		}
		void ApplyVideoFilter (CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
		{
			// Grab the AVAsset backing the video being edited.
			var avAsset = input.AudiovisualAsset;
			if (avAsset == null)
				throw new InvalidProgramException ("can't get AV asset to edit");

			// Build a composition that pushes every source frame through the filter.
			var composition = AVVideoComposition.CreateVideoComposition (avAsset, request => {
				filter.Image = request.SourceImage;
				request.Finish (filter.OutputImage, null);
			});

			// Export the filtered composition to the editing output's URL.
			// TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44443
			var exportSession = new AVAssetExportSession (avAsset, AVAssetExportSession.PresetHighestQuality) {
				OutputFileType = AVFileType.QuickTimeMovie,
				OutputUrl = output.RenderedContentUrl,
				VideoComposition = composition
			};
			exportSession.ExportAsynchronously (completion);
		}