void CapturePhoto(NSObject sender)
{
    /*
     * Retrieve the video preview layer's video orientation on the main queue before
     * entering the session queue. We do this to ensure that UI elements are accessed on
     * the main thread and session configuration is done on the session queue.
     */
    var videoPreviewLayerVideoOrientation = VideoPreviewLayer.Connection.VideoOrientation;

    sessionQueue.DispatchAsync(() =>
    {
        // Update the photo output's connection to match the video orientation of the video preview layer.
        var photoOutputConnection = photoOutput.ConnectionFromMediaType(AVMediaType.Video);
        photoOutputConnection.VideoOrientation = videoPreviewLayerVideoOrientation;

        AVCapturePhotoSettings photoSettings;

        // Capture a HEIF photo when supported, with flash set to auto and high-resolution photo enabled.
        if (photoOutput.AvailablePhotoCodecTypes.Where(codec => codec == AVVideo2.CodecHEVC).Any())
        {
            photoSettings = AVCapturePhotoSettings.FromFormat(new NSDictionary<NSString, NSObject>(AVVideo.CodecKey, AVVideo2.CodecHEVC));
        }
        else
        {
            photoSettings = AVCapturePhotoSettings.Create();
        }

        if (videoDeviceInput.Device.FlashAvailable)
        {
            photoSettings.FlashMode = AVCaptureFlashMode.Auto;
        }
        photoSettings.IsHighResolutionPhotoEnabled = true;

        if (photoSettings.AvailablePreviewPhotoPixelFormatTypes.Count() > 0)
        {
            photoSettings.PreviewPhotoFormat = new NSDictionary<NSString, NSObject>(CoreVideo.CVPixelBuffer.PixelFormatTypeKey, photoSettings.AvailablePreviewPhotoPixelFormatTypes.First());
        }

        if (livePhotoMode == AVCamLivePhotoMode.On && photoOutput.IsLivePhotoCaptureSupported)
        {
            // Live Photo capture is not supported in movie mode.
            var livePhotoMovieFileName = Guid.NewGuid().ToString();
            var livePhotoMovieFilePath = NSFileManager.DefaultManager.GetTemporaryDirectory().Append($"{livePhotoMovieFileName}.mov", false);
            photoSettings.LivePhotoMovieFileUrl = livePhotoMovieFilePath;
        }

        if (depthDataDeliveryMode == AVCamDepthDataDeliveryMode.On && photoOutput.IsDepthDataDeliverySupported())
        {
            photoSettings.IsDepthDataDeliveryEnabled(true);
        }
        else
        {
            photoSettings.IsDepthDataDeliveryEnabled(false);
        }

        // Use a separate object for the photo capture delegate to isolate each capture life cycle.
        var photoCaptureDelegate = new AVCamPhotoCaptureDelegate(photoSettings, () =>
        {
            DispatchQueue.MainQueue.DispatchAsync(() =>
            {
                VideoPreviewLayer.Opacity = 0.0f;
                UIView.Animate(0.25, () =>
                {
                    VideoPreviewLayer.Opacity = 1.0f;
                });
            });
        }, (bool capturing) =>
        {
            /*
             * Because Live Photo captures can overlap, we need to keep track of the
             * number of in-progress Live Photo captures to ensure that the
             * Live Photo label stays visible during these captures.
             */
            sessionQueue.DispatchAsync(() =>
            {
                if (capturing)
                {
                    inProgressLivePhotoCapturesCount++;
                }
                else
                {
                    inProgressLivePhotoCapturesCount--;
                }

                var lInProgressLivePhotoCapturesCount = inProgressLivePhotoCapturesCount;
                //DispatchQueue.MainQueue.DispatchAsync(() =>
                //{
                //    if (lInProgressLivePhotoCapturesCount > 0)
                //    {
                //        CapturingLivePhotoLabel.Hidden = false;
                //    }
                //    else if (lInProgressLivePhotoCapturesCount == 0)
                //    {
                //        CapturingLivePhotoLabel.Hidden = true;
                //    }
                //    else
                //    {
                //        Console.WriteLine(@"Error: In progress live photo capture count is less than 0");
                //    }
                //});
            });
        }, (AVCamPhotoCaptureDelegate lPhotoCaptureDelegate) =>
        {
            // When the capture is complete, remove the reference to the photo capture delegate so it can be deallocated.
            sessionQueue.DispatchAsync(() =>
            {
                inProgressPhotoCaptureDelegates[lPhotoCaptureDelegate.RequestedPhotoSettings.UniqueID] = null;
            });
        });

        /*
         * The photo output keeps a weak reference to the photo capture delegate, so
         * we store it in a dictionary to maintain a strong reference to this object
         * until the capture is completed.
         */
        inProgressPhotoCaptureDelegates[photoCaptureDelegate.RequestedPhotoSettings.UniqueID] = photoCaptureDelegate;
        photoOutput.CapturePhoto(photoSettings, photoCaptureDelegate);
    });
}
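/*
 * A minimal sketch (not part of the original listing) of the members CapturePhoto relies on,
 * assuming they are declared elsewhere in this controller. The field names are taken from
 * their usage above; the dictionary key type is assumed to be the long UniqueID exposed by
 * AVCapturePhotoSettings, and the actual declarations in the project may differ.
 */
// Serializes all interaction with the capture session off the main thread.
DispatchQueue sessionQueue = new DispatchQueue("session queue");

// Holds strong references to in-flight capture delegates (the photo output only keeps a weak one),
// keyed by the unique ID of the AVCapturePhotoSettings used for each capture.
Dictionary<long, AVCamPhotoCaptureDelegate> inProgressPhotoCaptureDelegates = new Dictionary<long, AVCamPhotoCaptureDelegate>();

// Tracks overlapping Live Photo captures so UI state can reflect whether any are still in progress.
int inProgressLivePhotoCapturesCount;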