Example #1
0
        /// <summary>
        /// Resolves the local file-system path backing a <see cref="PHAsset"/> (image or video).
        /// </summary>
        /// <param name="photo">The Photos-library asset to resolve.</param>
        /// <returns>The path of the original full-size file for the asset.</returns>
        /// <exception cref="InvalidDataException">The Photos callback returned no usable URL.</exception>
        /// <exception cref="NotSupportedException">The asset is neither an image nor a video.</exception>
        public static async Task<string> GetIOSFilePath(this PHAsset photo)
        {
            // RunContinuationsAsynchronously keeps the await from resuming inline
            // on the Photos framework's callback thread.
            var tcs = new TaskCompletionSource<NSUrl>(TaskCreationOptions.RunContinuationsAsynchronously);

            if (photo.MediaType == PHAssetMediaType.Image)
            {
                var options = new PHContentEditingInputRequestOptions();
                options.CanHandleAdjustmentData = _ => true;
                photo.RequestContentEditingInput(options, (contentEditingInput, requestStatusInfo) => {
                    // contentEditingInput is null when the request fails (e.g. asset only
                    // in iCloud) — fail the task instead of dereferencing null.
                    if (contentEditingInput?.FullSizeImageUrl is NSUrl url)
                    {
                        tcs.TrySetResult(url);
                    }
                    else
                    {
                        tcs.TrySetException(new InvalidDataException("RequestContentEditingInput returned no FullSizeImageUrl"));
                    }
                });
            }
            else if (photo.MediaType == PHAssetMediaType.Video)
            {
                var options = new PHVideoRequestOptions();
                options.Version = PHVideoRequestOptionsVersion.Original;
                PHImageManager.DefaultManager.RequestAvAsset(photo, options, (asset, audioMix, info) => {
                    if (asset is AVUrlAsset urlAsset)
                    {
                        tcs.TrySetResult(urlAsset.Url);
                        return;
                    }
                    tcs.TrySetException(new InvalidDataException("RequestAvAsset didn't get AVUrlAsset"));
                });
            }
            else
            {
                // The original awaited a task that could never complete for other media
                // types (audio, unknown), hanging the caller; fail fast instead.
                tcs.TrySetException(new NotSupportedException($"Unsupported media type: {photo.MediaType}"));
            }

            var origFilePath = await tcs.Task.ConfigureAwait(false);

            return origFilePath.Path;
        }
        // Toggles the selection state of the image at positionImage in the current
        // directory, refreshes the grid, records whether the asset lives in iCloud,
        // caches the supplied image source / raw bytes, and updates the Done button.
        public void IF_ImageSelected(int positionDirectory, int positionImage, ImageSource imageSource, byte[] stream)
        {
            var entry = galleryDirectories[CurrentParent].Images[positionImage];

            // Flip the checkmark and redraw so the cell reflects the new state.
            entry.galleryImageXF.Checked = !entry.galleryImageXF.Checked;
            collectionView.ReloadData();

            if (!entry.galleryImageXF.Checked)
            {
                entry.galleryImageXF.OriginalPath = null;
            }
            else
            {
                var editingOptions = new PHContentEditingInputRequestOptions
                {
                };

                entry.Image.RequestContentEditingInput(editingOptions, (contentEditingInput, requestStatusInfo) =>
                {
                    var cloudKey = new NSString("PHContentEditingInputResultIsInCloudKey");
                    if (requestStatusInfo.ContainsKey(cloudKey))
                    {
                        // "1" means the full-size asset is in iCloud rather than on device.
                        var flag = requestStatusInfo.ObjectForKey(cloudKey);
                        if (flag.ToString().Equals("1"))
                        {
                            entry.galleryImageXF.CloudStorage = true;
                        }
                        else
                        {
                            entry.galleryImageXF.CloudStorage = false;
                            //item.Path = contentEditingInput.FullSizeImageUrl.ToString().Substring(7);
                        }
                    }
                });
            }

            if (imageSource != null)
            {
                entry.galleryImageXF.ImageSourceXF = imageSource;
            }
            if (stream != null)
            {
                entry.galleryImageXF.ImageRawData = stream;
            }

            // Show the running selection count on the Done button.
            var selectedCount = GetCurrentSelected().Count;
            var title = selectedCount > 0 ? "Done (" + selectedCount + ")" : "Done";
            ButtonDone.SetTitle(title, UIControlState.Normal);
        }
        /// <summary>
        /// Applies <paramref name="filter"/> to the full-size image of <c>Asset</c> and
        /// commits the rendered JPEG (plus adjustment data) back to the Photos library.
        /// </summary>
        /// <param name="filter">Core Image filter to apply; its input image is assigned here.</param>
        void ApplyFilter(CIFilter filter)
        {
            // Prepare the options to pass when requesting to edit the image.
            var options = new PHContentEditingInputRequestOptions();

            options.SetCanHandleAdjustmentDataHandler(adjustmentData => {
                bool result = false;
                // NOTE(review): this handler may be invoked off the main thread; the
                // synchronous hop to the main thread mirrors the original behavior.
                InvokeOnMainThread(() => {
                    result = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0";
                });

                return(result);
            });

            Asset.RequestContentEditingInput(options, (contentEditingInput, requestStatusInfo) => {
                // Create a CIImage from the full image representation.
                var url         = contentEditingInput.FullSizeImageUrl;
                int orientation = (int)contentEditingInput.FullSizeImageOrientation;
                var inputImage  = CIImage.FromUrl(url);
                inputImage      = inputImage.CreateWithOrientation((CIImageOrientation)orientation);

                // Create the filter to apply.
                filter.SetDefaults();
                filter.Image = inputImage;

                // Apply the filter. A filter can legitimately produce no output
                // (bad parameters) — bail out instead of crashing on a null render.
                CIImage outputImage = filter.OutputImage;
                if (outputImage == null)
                {
                    Console.WriteLine("Error: filter '{0}' produced no output image", filter.Name);
                    return;
                }

                // Create a PHAdjustmentData object that describes the filter that was applied.
                var adjustmentData = new PHAdjustmentData(
                    AdjustmentFormatIdentifier,
                    "1.0",
                    NSData.FromString(filter.Name, NSStringEncoding.UTF8)
                    );

                var contentEditingOutput = new PHContentEditingOutput(contentEditingInput);
                NSData jpegData          = outputImage.GetJpegRepresentation(0.9f);

                // The original ignored the save result; surface the failure instead of
                // committing an empty render to the library.
                if (!jpegData.Save(contentEditingOutput.RenderedContentUrl, NSDataWritingOptions.Atomic, out NSError saveError))
                {
                    Console.WriteLine("Error: {0}", saveError.LocalizedDescription);
                    return;
                }
                contentEditingOutput.AdjustmentData = adjustmentData;

                // Ask the shared PHPhotoLibrary to perform the changes.
                PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                    var request = PHAssetChangeRequest.ChangeRequest(Asset);
                    request.ContentEditingOutput = contentEditingOutput;
                }, (success, error) => {
                    if (!success)
                    {
                        Console.WriteLine("Error: {0}", error.LocalizedDescription);
                    }
                });
            });
        }
Example #4
0
        // Applies the given Core Image filter to the current Asset (photo, Live Photo,
        // or video) and commits the rendered result to the Photos library.
        void ApplyFilter(CIFilter filter)
        {
            // Only resume prior edits that were written in our own adjustment-data format.
            var options = new PHContentEditingInputRequestOptions
            {
                CanHandleAdjustmentData = adjustmentData =>
                    adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion
            };

            // Prepare for editing.
            Asset.RequestContentEditingInput(options, (input, requestStatusInfo) => {
                if (input == null)
                {
                    throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
                }

                // The callback runs on the main thread; move the heavy lifting off it.
                DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() => {
                    // Record which filter was applied so the edit is describable later.
                    var adjustmentData = new PHAdjustmentData(
                        formatIdentifier,
                        formatVersion,
                        NSData.FromString(filter.Name, NSStringEncoding.UTF8));

                    // NOTE:
                    // The filter UI here is fire-and-forget: the user picks a filter and the
                    // app saves the result immediately, so there is never an "in-progress"
                    // edit whose adjustment data we would need to read back. We still write
                    // adjustment data so future versions of this app (or other apps that
                    // understand our adjustment data format) can make use of it.

                    // Create content editing output, write the adjustment data.
                    var output = new PHContentEditingOutput(input)
                    {
                        AdjustmentData = adjustmentData
                    };

                    // Pick the renderer that matches the asset's media type.
                    Action<CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> render;
                    if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
                    {
                        render = ApplyLivePhotoFilter;
                    }
                    else if (Asset.MediaType == PHAssetMediaType.Image)
                    {
                        render = ApplyPhotoFilter;
                    }
                    else
                    {
                        render = ApplyVideoFilter;
                    }

                    // Render, then commit the edit to the Photos library when done.
                    render(filter, input, output, () => {
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                            var changeRequest = PHAssetChangeRequest.ChangeRequest(Asset);
                            changeRequest.ContentEditingOutput = output;
                        }, (success, error) => {
                            if (!success)
                            {
                                Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                            }
                        });
                    });
                });
            });
        }
        /// <summary>
        /// Refreshes the edit/favorite/trash/play buttons to reflect which operations
        /// the current asset (and its containing collection) supports.
        /// </summary>
        void UpdateToolbars()
        {
            // Enable editing buttons if the asset can be edited.
            EditButton.Enabled     = Asset.CanPerformEditOperation(PHAssetEditOperation.Content);
            FavoriteButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Properties);
            FavoriteButton.Title   = Asset.Favorite ? "♥︎" : "♡";

            // Enable the trash button if the asset can be deleted — either removed
            // from its collection or deleted outright.
            if (AssetCollection != null)
            {
                TrashButton.Enabled = AssetCollection.CanPerformEditOperation(PHCollectionEditOperation.RemoveContent);
            }
            else
            {
                TrashButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Delete);
            }

            // Set the appropriate toolbarItems based on the mediaType of the asset.
            if (Asset.MediaType == PHAssetMediaType.Video)
            {
#if __TVOS__
                NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { PlayButton, FavoriteButton, TrashButton };
#elif __IOS__
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, PlayButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
            else
            {
#if __TVOS__
                // In tvOS, PHLivePhotoView doesn't do playback gestures,
                // so add a play button for Live Photos.
                // FIX: the original had malformed braces/semicolons here ("}" / ";" /
                // "else" / "};"), which did not compile under __TVOS__.
                if (Asset.PlaybackStyle == PHAssetPlaybackStyle.LivePhoto)
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { LivePhotoPlayButton, FavoriteButton, TrashButton };
                }
                else
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { FavoriteButton, TrashButton };
                }
#elif __IOS__
                // In iOS, present both stills and Live Photos the same way, because
                // PHLivePhotoView provides the same gesture-based UI as in Photos app.
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
        }

        // Fetches a high-quality still rendition of the asset (allowing an iCloud
        // download) and displays it, reporting progress to the progress view.
        void UpdateStillImage()
        {
            var requestOptions = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // Progress callbacks can arrive off the main queue; hop over for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageForAsset(Asset, GetTargetSize(), PHImageContentMode.AspectFit, requestOptions, (image, info) =>
            {
                // The request finished — stop showing progress either way.
                ProgressView.Hidden = true;

                if (image == null)
                {
                    return;
                }

                // Got an image: reveal the image view and display it.
                ImageView.Hidden = false;
                ImageView.Image  = image;
            });
        }

        // Fetches a high-quality Live Photo for the asset (allowing an iCloud
        // download), displays it, and plays a short hint once.
        void UpdateLivePhoto()
        {
            var requestOptions = new PHLivePhotoRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary dictionary) =>
                {
                    stop = false;
                    // Progress callbacks can arrive off the main queue; hop over for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() =>
                    {
                        ProgressView.Progress = (float)progress;
                    });
                }
            };

            ProgressView.Hidden = false;
            // Request the live photo for the asset from the default PHImageManager.
            PHImageManager.DefaultManager.RequestLivePhoto(Asset, GetTargetSize(), PHImageContentMode.AspectFit, requestOptions, (livePhoto, info) =>
            {
                // The request finished — stop showing progress either way.
                ProgressView.Hidden = true;

                if (livePhoto == null)
                {
                    return;
                }

                // Swap the Live Photo view in for the still/animated views.
                ImageView.Hidden         = true;
                AnimatedImageView.Hidden = true;
                LivePhotoView.Hidden     = false;
                LivePhotoView.LivePhoto  = livePhoto;

                // Play a short section once, similar to the Photos share sheet.
                if (isPlayingHint)
                {
                    return;
                }
                isPlayingHint = true;
                LivePhotoView.StartPlayback(PHLivePhotoViewPlaybackStyle.Hint);
            });
        }

        // Fetches the asset's original image data (the animated container) and starts
        // playback in the animated image view, reporting download progress.
        void UpdateAnimatedImage()
        {
            var requestOptions = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                Version              = PHImageRequestOptionsVersion.Original,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // Progress callbacks can arrive off the main queue; hop over for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageData(Asset, requestOptions, (data, dataUti, orientation, info) =>
            {
                // The request finished — stop showing progress either way.
                ProgressView.Hidden = true;

                if (data == null)
                {
                    return;
                }

                // Swap the animated view in for the still/live views and start playing.
                LivePhotoView.Hidden            = true;
                ImageView.Hidden                = true;
                AnimatedImageView.Hidden        = false;
                AnimatedImageView.AnimatedImage = new AnimatedImage(data);
                AnimatedImageView.IsPlaying     = true;
            });
        }

        #endregion

        #region Asset editing

        // Discards all edits, restoring the asset's original content in the library.
        void RevertAsset(UIAlertAction action)
        {
            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
                () =>
                {
                    var changeRequest = PHAssetChangeRequest.ChangeRequest(Asset);
                    changeRequest.RevertAssetContentToOriginal();
                },
                (success, error) =>
                {
                    if (!success)
                    {
                        Console.WriteLine($"can't revert asset: {error.LocalizedDescription}");
                    }
                });
        }

        // Applies the given Core Image filter to the current Asset and commits the
        // rendered result (with adjustment data) to the Photos library.
        void ApplyFilter(CIFilter filter)
        {
            // Accept prior edits only if they were written in our own adjustment-data format.
            var options = new PHContentEditingInputRequestOptions();
            options.CanHandleAdjustmentData = adjustmentData =>
                adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion;

            // Prepare for editing.
            Asset.RequestContentEditingInput(options, (editingInput, requestStatusInfo) =>
            {
                if (editingInput == null)
                {
                    throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
                }

                // This handler runs on the main thread; push the heavy work to a background queue.
                DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() =>
                {
                    // Record which filter was applied so the edit is describable later.
                    var appliedAdjustment = new PHAdjustmentData(
                        formatIdentifier,
                        formatVersion,
                        NSData.FromString(filter.Name, NSStringEncoding.UTF8));

                    // NOTE:
                    // The filter UI is fire-and-forget: a chosen filter is applied and the
                    // asset saved immediately, so there is no "in-progress" edit whose
                    // adjustment data we would need to read back. Writing adjustment data
                    // anyway lets future versions of this app (or other apps that
                    // understand our adjustment data format) make use of it.

                    // Create content editing output, write the adjustment data.
                    var editingOutput = new PHContentEditingOutput(editingInput)
                    {
                        AdjustmentData = appliedAdjustment
                    };

                    // Pick the renderer matching the asset's media type.
                    Action<CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> render;
                    if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
                    {
                        render = ApplyLivePhotoFilter;
                    }
                    else if (Asset.MediaType == PHAssetMediaType.Image)
                    {
                        render = ApplyPhotoFilter;
                    }
                    else
                    {
                        render = ApplyVideoFilter;
                    }

                    // Render, then commit the edit to the Photos library.
                    render(filter, editingInput, editingOutput, () =>
                    {
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                        {
                            var changeRequest = PHAssetChangeRequest.ChangeRequest(Asset);
                            changeRequest.ContentEditingOutput = editingOutput;
                        }, (success, error) =>
                        {
                            if (!success)
                            {
                                Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                            }
                        });
                    });
                });
            });
        }

        /// <summary>
        /// Renders <paramref name="filter"/> over a still photo and writes the result
        /// as JPEG to <paramref name="output"/>'s rendered-content URL.
        /// </summary>
        /// <param name="filter">Filter to apply; its input image is assigned here.</param>
        /// <param name="input">Editing input providing the full-size source image.</param>
        /// <param name="output">Editing output that receives the rendered JPEG.</param>
        /// <param name="completion">Invoked after the JPEG has been written successfully.</param>
        /// <exception cref="InvalidProgramException">The source image can't be loaded or the render/write fails.</exception>
        void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load the full size image. FromUrl returns null on load failure, which
            // makes the guard below meaningful — the original `new CIImage(url)`
            // could never produce null, so its null check was dead code.
            var inputImage = CIImage.FromUrl(input.FullSizeImageUrl);

            if (inputImage == null)
            {
                throw new InvalidProgramException("can't load input image to edit");
            }

            // Apply the filter, honoring the source image's orientation.
            filter.Image = inputImage.CreateWithOrientation(input.FullSizeImageOrientation);
            var outputImage = filter.OutputImage;

            // Write the edited image as a JPEG.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
            NSError error;

            if (!ciContext.WriteJpegRepresentation(outputImage, output.RenderedContentUrl, inputImage.ColorSpace, new NSDictionary(), out error))
            {
                throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
            }

            completion();
        }

        /// <summary>
        /// Applies <paramref name="filter"/> to every frame of a Live Photo (stills and
        /// video) and saves the result into <paramref name="output"/>.
        /// </summary>
        /// <param name="filter">Filter applied per frame via the frame processor.</param>
        /// <param name="input">Editing input for the Live Photo.</param>
        /// <param name="output">Editing output the filtered Live Photo is saved to.</param>
        /// <param name="completion">Invoked only when saving succeeds.</param>
        void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // This app filters assets only for output. In an app that previews
            // filters while editing, create a livePhotoContext early and reuse it
            // to render both for previewing and for final output.
            var livePhotoContext = new PHLivePhotoEditingContext(input);

            // Route every frame through the filter.
            livePhotoContext.FrameProcessor2 = (IPHLivePhotoFrame frame, ref NSError _) =>
            {
                filter.Image = frame.Image;
                return filter.OutputImage;
            };
            livePhotoContext.SaveLivePhoto(output, (PHLivePhotoEditingOption)null, (success, error) =>
            {
                if (success)
                {
                    completion();
                }
                else
                {
                    // Include the underlying error; the original message dropped it,
                    // making failures impossible to diagnose.
                    Console.WriteLine("can't output live photo: {0}", error?.LocalizedDescription);
                }
            });
            // Applying edits to a Live Photo currently crashes
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=58227
        }

        /// <summary>
        /// Applies <paramref name="filter"/> to each video frame via an
        /// <c>AVVideoComposition</c> and exports the result to
        /// <paramref name="output"/>'s rendered-content URL.
        /// </summary>
        /// <param name="filter">Filter applied per frame in the composition callback.</param>
        /// <param name="input">Editing input providing the source AV asset.</param>
        /// <param name="output">Editing output the exported movie is written to.</param>
        /// <param name="completion">Invoked when the export finishes (success or failure).</param>
        /// <exception cref="InvalidProgramException">The input has no audiovisual asset.</exception>
        void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load AVAsset to process from input.
            var avAsset = input.AudiovisualAsset;

            if (avAsset == null)
            {
                throw new InvalidProgramException("can't get AV asset to edit");
            }

            // Set up a video composition that routes each frame through the filter.
            var composition = AVVideoComposition.CreateVideoComposition(avAsset, request =>
            {
                filter.Image = request.SourceImage;
                var filtered = filter.OutputImage;
                request.Finish(filtered, null);
            });

            // Export the video composition to the output URL.
            var export = new AVAssetExportSession(avAsset, AVAssetExportSessionPreset.HighestQuality)
            {
                OutputFileType   = AVFileType.QuickTimeMovie,
                OutputUrl        = output.RenderedContentUrl,
                VideoComposition = composition
            };

            export.ExportAsynchronously(() =>
            {
                // The original ignored export failures entirely; log them so a bad
                // export doesn't silently commit an empty edit. Completion is still
                // invoked unconditionally, matching the original contract.
                if (export.Status == AVAssetExportSessionStatus.Failed)
                {
                    Console.WriteLine($"can't export filtered video: {export.Error?.LocalizedDescription}");
                }
                completion();
            });
        }
		// Applies the given Core Image filter to the full-size image of Asset and
		// commits the rendered JPEG (with adjustment data) back to the Photos library.
		void ApplyFilter (CIFilter filter)
		{
			// Only take over prior edits recorded in our own adjustment-data format.
			var options = new PHContentEditingInputRequestOptions ();
			options.SetCanHandleAdjustmentDataHandler (adjustmentData => {
				bool canHandle = false;
				InvokeOnMainThread (() => {
					canHandle = adjustmentData.FormatIdentifier == AdjustmentFormatIdentifier && adjustmentData.FormatVersion == "1.0";
				});

				return canHandle;
			});

			Asset.RequestContentEditingInput (options, (contentEditingInput, requestStatusInfo) => {
				// Build an oriented CIImage from the full-size representation.
				var fullSizeUrl = contentEditingInput.FullSizeImageUrl;
				int exifOrientation = (int)contentEditingInput.FullSizeImageOrientation;
				var sourceImage = CIImage.FromUrl (fullSizeUrl).CreateWithOrientation ((CIImageOrientation)exifOrientation);

				// Configure the filter and render.
				filter.SetDefaults ();
				filter.Image = sourceImage;
				CIImage rendered = filter.OutputImage;

				// Describe the applied filter so the edit can be identified later.
				var adjustmentData = new PHAdjustmentData (
					AdjustmentFormatIdentifier,
					"1.0",
					NSData.FromString (filter.Name, NSStringEncoding.UTF8)
				);

				// Write the rendered JPEG where Photos expects the edited content.
				var contentEditingOutput = new PHContentEditingOutput (contentEditingInput);
				var jpegData = rendered.GetJpegRepresentation (0.9f);
				jpegData.Save (contentEditingOutput.RenderedContentUrl, true);
				contentEditingOutput.AdjustmentData = adjustmentData;

				// Ask the shared PHPhotoLibrary to commit the changes.
				PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
					var request = PHAssetChangeRequest.ChangeRequest (Asset);
					request.ContentEditingOutput = contentEditingOutput;
				}, (success, error) => {
					if (!success)
						Console.WriteLine ("Error: {0}", error.LocalizedDescription);
				});
			});
		}
		// Applies the given Core Image filter to the current Asset (photo, Live Photo,
		// or video) and commits the rendered result to the Photos library.
		void ApplyFilter (CIFilter filter)
		{
			// Accept prior edits only if they match our own adjustment-data format.
			var options = new PHContentEditingInputRequestOptions {
				CanHandleAdjustmentData = adjustmentData =>
					adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion
			};

			// Prepare for editing.
			Asset.RequestContentEditingInput (options, (input, requestStatusInfo) => {
				if (input == null) {
					throw new InvalidProgramException ($"can't get content editing input: {requestStatusInfo}");
				}

				// The callback arrives on the main thread; move processing to a background queue.
				DispatchQueue.GetGlobalQueue (DispatchQueuePriority.Default).DispatchAsync (() => {
					// Record which filter was applied so the edit is describable later.
					var adjustmentData = new PHAdjustmentData (
						formatIdentifier,
						formatVersion,
						NSData.FromString (filter.Name, NSStringEncoding.UTF8));

					// NOTE:
					// The filter UI is fire-and-forget: the user picks a filter and the app
					// saves the result immediately, so there is never an "in-progress" edit
					// whose adjustment data we would need to read back. We still write
					// adjustment data so future versions of this app (or other apps that
					// understand our adjustment data format) can make use of it.

					// Create content editing output, write the adjustment data.
					var output = new PHContentEditingOutput (input) {
						AdjustmentData = adjustmentData
					};

					// Pick the renderer matching the asset's media type.
					Action<CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> render;
					if (Asset.MediaSubtypes.HasFlag (PHAssetMediaSubtype.PhotoLive)) {
						render = ApplyLivePhotoFilter;
					} else if (Asset.MediaType == PHAssetMediaType.Image) {
						render = ApplyPhotoFilter;
					} else {
						render = ApplyVideoFilter;
					}

					// Render, then commit the edit to the Photos library when done.
					render (filter, input, output, () => {
						PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
							var changeRequest = PHAssetChangeRequest.ChangeRequest (Asset);
							changeRequest.ContentEditingOutput = output;
						}, (success, error) => {
							if (!success) {
								Console.WriteLine ($"can't edit asset: {error.LocalizedDescription}");
							}
						});
					});
				});
			});
		}