void UpdateImage()
        {
            // Pull the frame for the currently displayed index; either the
            // animated image or the destination view may be unavailable.
            var frame = AnimatedImage?.ImageAtIndex(displayedIndex);
            if (frame == null || displayView == null)
            {
                return;
            }

            // Display the frame directly via the backing layer's contents.
            displayView.Layer.Contents = frame;
        }
        /// <summary>
        /// Enables/disables the edit, favorite, and trash buttons for the current
        /// asset and installs the bar items appropriate for its media type.
        /// </summary>
        void UpdateToolbars()
        {
            // Enable editing buttons if the asset can be edited.
            EditButton.Enabled     = Asset.CanPerformEditOperation(PHAssetEditOperation.Content);
            FavoriteButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Properties);
            FavoriteButton.Title   = Asset.Favorite ? "♥︎" : "♡";

            // Enable the trash button if the asset can be deleted — either removed
            // from its collection, or deleted outright when browsing all photos.
            if (AssetCollection != null)
            {
                TrashButton.Enabled = AssetCollection.CanPerformEditOperation(PHCollectionEditOperation.RemoveContent);
            }
            else
            {
                TrashButton.Enabled = Asset.CanPerformEditOperation(PHAssetEditOperation.Delete);
            }

            // Set the appropriate toolbarItems based on the mediaType of the asset.
            if (Asset.MediaType == PHAssetMediaType.Video)
            {
#if __TVOS__
                NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { PlayButton, FavoriteButton, TrashButton };
#elif __IOS__
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, PlayButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
            else
            {
#if __TVOS__
                // In tvOS, PHLivePhotoView doesn't do playback gestures,
                // so add a play button for Live Photos.
                // FIX: the statement-terminating semicolons were previously placed
                // outside the if/else braces, which orphaned the `else` and broke
                // the tvOS build; they now terminate the assignments themselves.
                if (Asset.PlaybackStyle == PHAssetPlaybackStyle.LivePhoto)
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { LivePhotoPlayButton, FavoriteButton, TrashButton };
                }
                else
                {
                    NavigationItem.LeftBarButtonItems = new UIBarButtonItem[] { FavoriteButton, TrashButton };
                }
#elif __IOS__
                // In iOS, present both stills and Live Photos the same way, because
                // PHLivePhotoView provides the same gesture-based UI as in Photos app.
                ToolbarItems = new UIBarButtonItem[] { FavoriteButton, Space, TrashButton };
                if (NavigationController != null)
                {
                    NavigationController.ToolbarHidden = false;
                }
#endif
            }
        }

        /// <summary>
        /// Fetches a high-quality still (photo, or video preview) for the asset and
        /// displays it in <c>ImageView</c>, showing download progress meanwhile.
        /// </summary>
        void UpdateStillImage()
        {
            // Prepare the options to pass when fetching the (photo, or video preview)
            // image: best quality, iCloud download allowed.
            var options = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // Handler might not be called on the main queue, so re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() =>
                    {
                        ProgressView.Progress = (float)progress;
                    });
                }
            };

            ProgressView.Hidden = false;
            PHImageManager.DefaultManager.RequestImageForAsset(Asset, GetTargetSize(), PHImageContentMode.AspectFit, options, (image, info) =>
            {
                // Hide the progress view now the request has completed.
                ProgressView.Hidden = true;

                // If unsuccessful, there is nothing to display.
                if (image == null)
                {
                    return;
                }

                // Now that we have the image, show it. Also hide the other media views
                // so a previously displayed live photo or animated image doesn't keep
                // covering the still (consistent with UpdateLivePhoto/UpdateAnimatedImage).
                LivePhotoView.Hidden     = true;
                AnimatedImageView.Hidden = true;
                ImageView.Hidden         = false;
                ImageView.Image          = image;
            });
        }

        /// <summary>
        /// Fetches the live photo for the asset and displays it in
        /// <c>LivePhotoView</c>, showing download progress meanwhile.
        /// </summary>
        void UpdateLivePhoto()
        {
            // Configure the fetch: best quality, allow an iCloud download,
            // and mirror download progress into the progress view.
            var fetchOptions = new PHLivePhotoRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary dictionary) =>
                {
                    stop = false;
                    // Handler might not be called on the main queue, so re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;

            // Request the live photo for the asset from the default PHImageManager.
            PHImageManager.DefaultManager.RequestLivePhoto(Asset, GetTargetSize(), PHImageContentMode.AspectFit, fetchOptions, (result, info) =>
            {
                // The request finished; the progress indicator is no longer needed.
                ProgressView.Hidden = true;

                if (result == null)
                {
                    return;
                }

                // Swap the visible media view over to the live photo.
                ImageView.Hidden         = true;
                AnimatedImageView.Hidden = true;
                LivePhotoView.Hidden     = false;
                LivePhotoView.LivePhoto  = result;

                // Play back a short section once, similar to the Photos share sheet.
                if (!isPlayingHint)
                {
                    isPlayingHint = true;
                    LivePhotoView.StartPlayback(PHLivePhotoViewPlaybackStyle.Hint);
                }
            });
        }

        /// <summary>
        /// Fetches the asset's original image data, decodes it as an animated
        /// image, and starts playback in <c>AnimatedImageView</c>.
        /// </summary>
        void UpdateAnimatedImage()
        {
            // Request the *original* data (needed to decode animation frames),
            // at best quality, allowing an iCloud download if necessary.
            var fetchOptions = new PHImageRequestOptions
            {
                DeliveryMode         = PHImageRequestOptionsDeliveryMode.HighQualityFormat,
                Version              = PHImageRequestOptionsVersion.Original,
                NetworkAccessAllowed = true,
                ProgressHandler      = (double progress, NSError error, out bool stop, NSDictionary info) =>
                {
                    stop = false;
                    // Handler might not be called on the main queue, so re-dispatch for UI work.
                    DispatchQueue.MainQueue.DispatchSync(() => ProgressView.Progress = (float)progress);
                }
            };

            ProgressView.Hidden = false;

            PHImageManager.DefaultManager.RequestImageData(Asset, fetchOptions, (data, dataUti, orientation, info) =>
            {
                // The request finished, one way or the other.
                ProgressView.Hidden = true;

                if (data == null)
                {
                    return;
                }

                // Decode the animation and make the animated view the visible one.
                var decoded = new AnimatedImage(data);

                LivePhotoView.Hidden            = true;
                ImageView.Hidden                = true;
                AnimatedImageView.Hidden        = false;
                AnimatedImageView.AnimatedImage = decoded;
                AnimatedImageView.IsPlaying     = true;
            });
        }

        #endregion

        #region Asset editing

        /// <summary>
        /// Discards every edit on the current asset, restoring its original content.
        /// Invoked from an alert action; the action argument itself is unused.
        /// </summary>
        void RevertAsset(UIAlertAction action)
        {
            PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(
                () => PHAssetChangeRequest.ChangeRequest(Asset).RevertAssetContentToOriginal(),
                (success, error) =>
                {
                    // Change blocks can fail (e.g. permission revoked) — just log it.
                    if (!success)
                    {
                        Console.WriteLine($"can't revert asset: {error.LocalizedDescription}");
                    }
                });
        }

        /// <summary>
        /// Applies <paramref name="filter"/> to the current asset (still, Live Photo,
        /// or video) and commits the rendered result back to the Photos library.
        /// </summary>
        /// <param name="filter">Core Image filter to apply; its name is stored as the edit's adjustment data.</param>
        void ApplyFilter(CIFilter filter)
        {
            // Set up a handler to make sure we can handle prior edits.
            var options = new PHContentEditingInputRequestOptions();

            // Only claim adjustment data this app wrote (same format id and version).
            options.CanHandleAdjustmentData = (adjustmentData =>
            {
                return(adjustmentData.FormatIdentifier == formatIdentifier && adjustmentData.FormatVersion == formatVersion);
            });

            // Prepare for editing.
            Asset.RequestContentEditingInput(options, (input, requestStatusInfo) =>
            {
                if (input == null)
                {
                    throw new InvalidProgramException($"can't get content editing input: {requestStatusInfo}");
                }

                // This handler gets called on the main thread; dispatch to a background queue for processing.
                DispatchQueue.GetGlobalQueue(DispatchQueuePriority.Default).DispatchAsync(() =>
                {
                    // Create a PHAdjustmentData object that describes the filter that was applied.
                    var adjustmentData = new PHAdjustmentData(
                        formatIdentifier,
                        formatVersion,
                        NSData.FromString(filter.Name, NSStringEncoding.UTF8));

                    // NOTE:
                    // This app's filter UI is fire-and-forget. That is, the user picks a filter,
                    // and the app applies it and outputs the saved asset immediately. There's
                    // no UI state for having chosen but not yet committed an edit. This means
                    // there's no role for reading adjustment data -- you do that to resume
                    // in-progress edits, and this sample app has no notion of "in-progress".
                    //
                    // However, it's still good to write adjustment data so that potential future
                    // versions of the app (or other apps that understand our adjustement data
                    // format) could make use of it.

                    // Create content editing output, write the adjustment data.
                    var output = new PHContentEditingOutput(input)
                    {
                        AdjustmentData = adjustmentData
                    };

                    // Select a filtering function for the asset's media type.
                    // Live Photo must be checked first: its MediaType is also Image.
                    Action <CIFilter, PHContentEditingInput, PHContentEditingOutput, Action> applyFunc;
                    if (Asset.MediaSubtypes.HasFlag(PHAssetMediaSubtype.PhotoLive))
                    {
                        applyFunc = ApplyLivePhotoFilter;
                    }
                    else if (Asset.MediaType == PHAssetMediaType.Image)
                    {
                        applyFunc = ApplyPhotoFilter;
                    }
                    else
                    {
                        applyFunc = ApplyVideoFilter;
                    }

                    // Apply the filter.
                    applyFunc(filter, input, output, () =>
                    {
                        // When rendering is done, commit the edit to the Photos library.
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                        {
                            var request = PHAssetChangeRequest.ChangeRequest(Asset);
                            request.ContentEditingOutput = output;
                        }, (success, error) =>
                        {
                            if (!success)
                            {
                                Console.WriteLine($"can't edit asset: {error.LocalizedDescription}");
                            }
                        });
                    });
                });
            });
        }

        /// <summary>
        /// Renders the full-size still image through <paramref name="filter"/> and
        /// writes the result as a JPEG to the editing output, then calls
        /// <paramref name="completion"/>.
        /// </summary>
        void ApplyPhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Load the full-resolution source image from the editing input.
            var source = new CIImage(input.FullSizeImageUrl);

            if (source == null)
            {
                throw new InvalidProgramException("can't load input image to edit");
            }

            // Feed the orientation-corrected image through the filter.
            filter.Image = source.CreateWithOrientation(input.FullSizeImageOrientation);
            var rendered = filter.OutputImage;

            // Write the edited image as a JPEG.
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=44503
            NSError error;
            if (!ciContext.WriteJpegRepresentation(rendered, output.RenderedContentUrl, source.ColorSpace, new NSDictionary(), out error))
            {
                throw new InvalidProgramException($"can't apply filter to image: {error.LocalizedDescription}");
            }

            completion();
        }

        /// <summary>
        /// Renders every frame of a Live Photo through <paramref name="filter"/> and
        /// saves the result to the editing output, calling <paramref name="completion"/>
        /// on success.
        /// </summary>
        void ApplyLivePhotoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // This app filters assets only for output. In an app that previews
            // filters while editing, create a livePhotoContext early and reuse it
            // to render both for previewing and for final output.
            var editingContext = new PHLivePhotoEditingContext(input);

            // Push each frame (still and video frames alike) through the filter.
            editingContext.FrameProcessor2 = (IPHLivePhotoFrame frame, ref NSError _) =>
            {
                filter.Image = frame.Image;
                return filter.OutputImage;
            };

            editingContext.SaveLivePhoto(output, (PHLivePhotoEditingOption)null, (success, error) =>
            {
                if (!success)
                {
                    Console.WriteLine("can't output live photo");
                    return;
                }

                completion();
            });
            // Applying edits to a Live Photo currently crashes
            // TODO: https://bugzilla.xamarin.com/show_bug.cgi?id=58227
        }

        /// <summary>
        /// Applies <paramref name="filter"/> to every frame of the asset's video via
        /// an AVVideoComposition and exports the result to the editing output,
        /// invoking <paramref name="completion"/> when the export finishes.
        /// </summary>
        void ApplyVideoFilter(CIFilter filter, PHContentEditingInput input, PHContentEditingOutput output, Action completion)
        {
            // Grab the AVAsset that the editing input wraps.
            var avAsset = input.AudiovisualAsset;
            if (avAsset == null)
            {
                throw new InvalidProgramException("can't get AV asset to edit");
            }

            // Build a composition that routes every source frame through the filter.
            var filteredComposition = AVVideoComposition.CreateVideoComposition(avAsset, request =>
            {
                filter.Image = request.SourceImage;
                request.Finish(filter.OutputImage, null);
            });

            // Export the filtered movie to the editing output's URL.
            var exporter = new AVAssetExportSession(avAsset, AVAssetExportSessionPreset.HighestQuality)
            {
                OutputFileType   = AVFileType.QuickTimeMovie,
                OutputUrl        = output.RenderedContentUrl,
                VideoComposition = filteredComposition
            };

            exporter.ExportAsynchronously(completion);
        }
        /// <summary>
        /// Display-link callback driving animated-image playback: accumulates
        /// elapsed time, advances <c>displayedIndex</c> past any fully-elapsed
        /// frame delays, handles loop wrap/termination, and refreshes the shown
        /// frame when it changed.
        /// </summary>
        void TimerFired()
        {
            // Nothing to do unless animation is allowed and frames are available.
            if (!ShouldAnimate() || AnimatedImage == null)
            {
                return;
            }

            var timestamp = displayLink.Timestamp;

            // If this is the first callback, set things up
            if (!hasStartedAnimating)
            {
                elapsedTime         = 0.0;
                previousTime        = timestamp;
                hasStartedAnimating = true;
            }

            var currentDelayTime = AnimatedImage.DelayAtIndex(displayedIndex);

            // Accumulate wall-clock time since the previous callback.
            elapsedTime += timestamp - previousTime;
            previousTime = timestamp;

            // Account for big gaps in playback by just resuming from now
            // e.g. user presses home button and comes back after a while.
            // Allow for the possibility of the current delay time being relatively long
            if (elapsedTime >= Math.Max(10.0, currentDelayTime + 1.0))
            {
                elapsedTime = 0.0;
            }

            var changedFrame = false;

            // Consume whole frame delays; several frames may be skipped at once if
            // callbacks arrived slowly.
            // NOTE(review): currentDelayTime is not refreshed per iteration, so every
            // frame advanced within one callback uses the first frame's delay —
            // confirm this is intended for variable-delay animations.
            while (elapsedTime >= currentDelayTime)
            {
                elapsedTime    -= currentDelayTime;
                displayedIndex += 1;
                changedFrame    = true;
                if (displayedIndex >= AnimatedImage.FrameCount)
                {
                    // Time to loop. Start infinite loops over, otherwise decrement loop count and stop if done
                    if (isInfiniteLoop)
                    {
                        displayedIndex = 0;
                    }
                    else
                    {
                        remainingLoopCount -= 1;
                        if (remainingLoopCount == 0)
                        {
                            // Final loop finished: flag completion and update UI state
                            // on the main queue.
                            hasFinishedAnimating = true;
                            DispatchQueue.MainQueue.DispatchAsync(() =>
                            {
                                UpdateAnimation();
                            });
                        }
                        else
                        {
                            displayedIndex = 0;
                        }
                    }
                }
            }

            // Only touch the layer contents when the visible frame actually changed.
            if (changedFrame)
            {
                UpdateImage();
            }
        }