/// <summary>
/// Toggles between the front and back camera. UI buttons are disabled for the
/// duration of the switch and re-enabled on the main queue when it completes.
/// All capture-session work happens on <c>SessionQueue</c>.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void ChangeCamera(CameraViewController sender)
{
    // Disable camera controls until the reconfiguration finishes.
    CameraButton.Enabled = false;
    RecordButton.Enabled = false;
    StillButton.Enabled = false;

    SessionQueue.DispatchAsync(() => {
        AVCaptureDevice currentVideoDevice = VideoDeviceInput.Device;
        AVCaptureDevicePosition preferredPosition = AVCaptureDevicePosition.Unspecified;
        AVCaptureDevicePosition currentPosition = currentVideoDevice.Position;

        // Front (or unspecified) flips to back; back flips to front.
        switch (currentPosition) {
        case AVCaptureDevicePosition.Unspecified:
        case AVCaptureDevicePosition.Front:
            preferredPosition = AVCaptureDevicePosition.Back;
            break;
        case AVCaptureDevicePosition.Back:
            preferredPosition = AVCaptureDevicePosition.Front;
            break;
        }

        AVCaptureDevice videoDevice = CreateDevice(AVMediaType.Video, preferredPosition);
        AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice);

        Session.BeginConfiguration();

        // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
        Session.RemoveInput(VideoDeviceInput);

        if (Session.CanAddInput(videoDeviceInput)) {
            // Re-point the subject-area-change observer at the new device.
            subjectSubscriber.Dispose();
            SetFlashModeForDevice(AVCaptureFlashMode.Auto, videoDevice);
            subjectSubscriber = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureDevice.SubjectAreaDidChangeNotification, SubjectAreaDidChange, videoDevice);
            Session.AddInput(videoDeviceInput);
            VideoDeviceInput = videoDeviceInput;
        } else {
            // The new input was rejected; restore the previous one so the session keeps working.
            Session.AddInput(VideoDeviceInput);
        }

        AVCaptureConnection connection = MovieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
        // FIX: ConnectionFromMediaType returns null when the output has no active video
        // connection; the original dereferenced it unconditionally and could crash here.
        if (connection != null && connection.SupportsVideoStabilization) {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }

        Session.CommitConfiguration();

        DispatchQueue.MainQueue.DispatchAsync(() => {
            CameraButton.Enabled = true;
            RecordButton.Enabled = true;
            StillButton.Enabled = true;
        });
    });
}
/// <summary>
/// Attempts to restart a capture session that was interrupted (e.g. by a phone
/// or FaceTime call). Shows an alert if the session still cannot run; hides the
/// resume button if it can.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void ResumeInterruptedSession(CameraViewController sender)
{
    sessionQueue.DispatchAsync(() => {
        // The session might fail to start running, e.g., if a phone or FaceTime call is
        // still using audio or video. A failure to start is communicated via a session
        // runtime error notification. To avoid repeatedly failing to start, we only retry
        // in the runtime error handler when we aren't already trying to resume here.
        session.StartRunning();
        sessionRunning = session.Running;

        if (session.Running) {
            // Resumed successfully — the resume affordance is no longer needed.
            DispatchQueue.MainQueue.DispatchAsync(() => {
                ResumeButton.Hidden = true;
            });
        } else {
            DispatchQueue.MainQueue.DispatchAsync(() => {
                var alert = UIAlertController.Create("AVCam", "Unable to resume", UIAlertControllerStyle.Alert);
                alert.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Cancel, null));
                PresentViewController(alert, true, null);
            });
        }
    });
}
/// <summary>
/// Starts movie recording to a temporary file if the output is idle, or stops the
/// current recording otherwise. Runs the state check and transition on
/// <c>SessionQueue</c> so it is serialized with other session work.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void ToggleMovieRecording(CameraViewController sender)
{
    // Disable the Camera button until recording finishes, and disable the Record button
    // until recording starts or finishes.
    CameraButton.Enabled = false;
    RecordButton.Enabled = false;

    SessionQueue.DispatchAsync(() => {
        if (MovieFileOutput.Recording) {
            // Already recording — this toggle stops it.
            MovieFileOutput.StopRecording();
            return;
        }

        if (UIDevice.CurrentDevice.IsMultitaskingSupported) {
            // Setup background task. This is needed because the
            // IAVCaptureFileOutputRecordingDelegate.FinishedRecording callback is not
            // received until AVCam returns to the foreground unless you request background
            // execution time. This also ensures there is time to write the file to the
            // photo library when AVCam is backgrounded. To conclude this background
            // execution, UIApplication.SharedApplication.EndBackgroundTask is called in
            // IAVCaptureFileOutputRecordingDelegate.FinishedRecording after the recorded
            // file has been saved.
            backgroundRecordingID = UIApplication.SharedApplication.BeginBackgroundTask(null);
        }

        // Update the orientation on the movie file output video connection before starting recording.
        var layer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
        AVCaptureConnection videoConnection = MovieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
        videoConnection.VideoOrientation = layer.Connection.VideoOrientation;

        // Turn OFF flash for video recording.
        SetFlashModeForDevice(AVCaptureFlashMode.Off, VideoDeviceInput.Device);

        // Start recording to a temporary file.
        MovieFileOutput.StartRecordingToOutputFile(new NSUrl(GetTmpFilePath("mov"), false), this);
    });
}
/// <summary>
/// Tries to restart the interrupted capture session. On success the resume button is
/// hidden; on failure an alert informs the user. The restart itself happens on
/// <c>sessionQueue</c>; all UI updates are dispatched to the main queue.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void ResumeInterruptedSession (CameraViewController sender)
{
    sessionQueue.DispatchAsync (() => {
        // The session might fail to start running, e.g., if a phone or FaceTime call is
        // still using audio or video. Such a failure is reported via a session runtime
        // error notification; to avoid repeatedly failing, the runtime error handler only
        // restarts the session when this resume path isn't already in flight.
        session.StartRunning ();
        bool nowRunning = session.Running;
        sessionRunning = nowRunning;

        if (!nowRunning) {
            DispatchQueue.MainQueue.DispatchAsync (() => {
                var alertController = UIAlertController.Create ("AVCam", "Unable to resume", UIAlertControllerStyle.Alert);
                var cancelAction = UIAlertAction.Create ("OK", UIAlertActionStyle.Cancel, null);
                alertController.AddAction (cancelAction);
                PresentViewController (alertController, true, null);
            });
            return;
        }

        DispatchQueue.MainQueue.DispatchAsync (() => {
            ResumeButton.Hidden = true;
        });
    });
}
/// <summary>
/// Captures a still image from the still-image output (flash set to Auto) and,
/// if photo-library access is authorized, saves the JPEG data as an asset —
/// directly on iOS 9+, or via a temporary file on iOS 8.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void SnapStillImage (CameraViewController sender)
{
    SessionQueue.DispatchAsync (async () => {
        AVCaptureConnection connection = StillImageOutput.ConnectionFromMediaType (AVMediaType.Video);
        var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;

        // Update the orientation on the still image output video connection before capturing.
        connection.VideoOrientation = previewLayer.Connection.VideoOrientation;

        // Flash set to Auto for Still Capture.
        SetFlashModeForDevice (AVCaptureFlashMode.Auto, VideoDeviceInput.Device);

        try {
            // Capture a still image.
            var imageDataSampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync (connection);
            // The sample buffer is not retained. Create image data before saving the still
            // image to the photo library asynchronously.
            NSData imageData = AVCaptureStillImageOutput.JpegStillToNSData (imageDataSampleBuffer);

            PHPhotoLibrary.RequestAuthorization (status => {
                if (status != PHAuthorizationStatus.Authorized)
                    return;

                // To preserve the metadata, we create an asset from the JPEG NSData representation.
                // Note that creating an asset from a UIImage discards the metadata.
                // In iOS 9, we can use AddResource method on PHAssetCreationRequest class.
                // In iOS 8, we save the image to a temporary file and use
                // +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].
                if (UIDevice.CurrentDevice.CheckSystemVersion (9, 0)) {
                    PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
                        var request = PHAssetCreationRequest.CreationRequestForAsset ();
                        request.AddResource (PHAssetResourceType.Photo, imageData, null);
                    }, (success, err) => {
                        if (!success)
                            Console.WriteLine ("Error occurred while saving image to photo library: {0}", err);
                    });
                } else {
                    var temporaryFileUrl = new NSUrl (GetTmpFilePath ("jpg"), false);
                    PHPhotoLibrary.SharedPhotoLibrary.PerformChanges (() => {
                        NSError error = null;
                        if (imageData.Save (temporaryFileUrl, NSDataWritingOptions.Atomic, out error))
                            PHAssetChangeRequest.FromImage (temporaryFileUrl);
                        else
                            // FIX: corrected misspelled "occured" in the error message.
                            Console.WriteLine ("Error occurred while writing image data to a temporary file: {0}", error);
                    }, (success, error) => {
                        if (!success)
                            Console.WriteLine ("Error occurred while saving image to photo library: {0}", error);
                        // Delete the temporary file (best-effort; a delete failure is ignored).
                        NSError deleteError;
                        NSFileManager.DefaultManager.Remove (temporaryFileUrl, out deleteError);
                    });
                }
            });
        } catch (NSErrorException ex) {
            Console.WriteLine ("Could not capture still image: {0}", ex.Error);
        }
    });
}
/// <summary>
/// Switches the active capture device between the front and back camera,
/// reconfiguring the session on <c>SessionQueue</c> and re-enabling the camera
/// controls on the main queue afterwards.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void ChangeCamera (CameraViewController sender)
{
    // Controls stay disabled until reconfiguration completes below.
    CameraButton.Enabled = false;
    RecordButton.Enabled = false;
    StillButton.Enabled = false;

    SessionQueue.DispatchAsync (() => {
        AVCaptureDevice currentVideoDevice = VideoDeviceInput.Device;
        AVCaptureDevicePosition preferredPosition = AVCaptureDevicePosition.Unspecified;
        AVCaptureDevicePosition currentPosition = currentVideoDevice.Position;

        // Flip position: front/unspecified -> back, back -> front.
        switch (currentPosition) {
        case AVCaptureDevicePosition.Unspecified:
        case AVCaptureDevicePosition.Front:
            preferredPosition = AVCaptureDevicePosition.Back;
            break;
        case AVCaptureDevicePosition.Back:
            preferredPosition = AVCaptureDevicePosition.Front;
            break;
        }

        AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, preferredPosition);
        AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice);

        Session.BeginConfiguration ();

        // Remove the existing device input first, since using the front and back camera simultaneously is not supported.
        Session.RemoveInput (VideoDeviceInput);

        if (Session.CanAddInput (videoDeviceInput)) {
            // Move the subject-area-change observer over to the new device.
            subjectSubscriber.Dispose ();
            SetFlashModeForDevice (AVCaptureFlashMode.Auto, videoDevice);
            subjectSubscriber = NSNotificationCenter.DefaultCenter.AddObserver (AVCaptureDevice.SubjectAreaDidChangeNotification, SubjectAreaDidChange, videoDevice);
            Session.AddInput (videoDeviceInput);
            VideoDeviceInput = videoDeviceInput;
        } else {
            // New input rejected; put the previous input back.
            Session.AddInput (VideoDeviceInput);
        }

        AVCaptureConnection connection = MovieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
        // FIX: ConnectionFromMediaType returns null when no active video connection exists;
        // the original dereferenced it unconditionally and could crash here.
        if (connection != null && connection.SupportsVideoStabilization)
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;

        Session.CommitConfiguration ();

        DispatchQueue.MainQueue.DispatchAsync (() => {
            CameraButton.Enabled = true;
            RecordButton.Enabled = true;
            StillButton.Enabled = true;
        });
    });
}
/// <summary>
/// Toggles movie recording: stops the output if it is recording, otherwise
/// prepares orientation/flash/background-task state and starts recording to a
/// temporary file. All session work is serialized on <c>SessionQueue</c>.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void ToggleMovieRecording (CameraViewController sender)
{
    // Disable the Camera button until recording finishes, and disable the Record button
    // until recording starts or finishes.
    CameraButton.Enabled = false;
    RecordButton.Enabled = false;

    SessionQueue.DispatchAsync (() => {
        bool alreadyRecording = MovieFileOutput.Recording;
        if (alreadyRecording) {
            MovieFileOutput.StopRecording ();
            return;
        }

        if (UIDevice.CurrentDevice.IsMultitaskingSupported) {
            // Setup background task. This is needed because the
            // IAVCaptureFileOutputRecordingDelegate.FinishedRecording callback is not
            // received until AVCam returns to the foreground unless you request background
            // execution time. This also ensures there is time to write the file to the
            // photo library when AVCam is backgrounded. To conclude this background
            // execution, UIApplication.SharedApplication.EndBackgroundTask is called in
            // IAVCaptureFileOutputRecordingDelegate.FinishedRecording after the recorded
            // file has been saved.
            backgroundRecordingID = UIApplication.SharedApplication.BeginBackgroundTask (null);
        }

        // Update the orientation on the movie file output video connection before starting recording.
        AVCaptureConnection movieConnection = MovieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
        var preview = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
        movieConnection.VideoOrientation = preview.Connection.VideoOrientation;

        // Turn OFF flash for video recording.
        SetFlashModeForDevice (AVCaptureFlashMode.Off, VideoDeviceInput.Device);

        // Start recording to a temporary file.
        var outputUrl = new NSUrl (GetTmpFilePath ("mov"), false);
        MovieFileOutput.StartRecordingToOutputFile (outputUrl, this);
    });
}
/// <summary>
/// Captures a still image (flash on Auto) and saves the resulting JPEG to the
/// photo library when authorized — via PHAssetCreationRequest on iOS 9+, or via
/// a temporary file and PHAssetChangeRequest on iOS 8.
/// </summary>
/// <param name="sender">The originating view controller (unused in the body).</param>
void SnapStillImage(CameraViewController sender)
{
    SessionQueue.DispatchAsync(async () => {
        AVCaptureConnection connection = StillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
        var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;

        // Update the orientation on the still image output video connection before capturing.
        connection.VideoOrientation = previewLayer.Connection.VideoOrientation;

        // Flash set to Auto for Still Capture.
        SetFlashModeForDevice(AVCaptureFlashMode.Auto, VideoDeviceInput.Device);

        try {
            // Capture a still image.
            var imageDataSampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync(connection);
            // The sample buffer is not retained. Create image data before saving the still
            // image to the photo library asynchronously.
            NSData imageData = AVCaptureStillImageOutput.JpegStillToNSData(imageDataSampleBuffer);

            PHPhotoLibrary.RequestAuthorization(status => {
                if (status != PHAuthorizationStatus.Authorized)
                    return;

                // To preserve the metadata, we create an asset from the JPEG NSData representation.
                // Note that creating an asset from a UIImage discards the metadata.
                // In iOS 9, we can use AddResource method on PHAssetCreationRequest class.
                // In iOS 8, we save the image to a temporary file and use
                // +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].
                if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0)) {
                    PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                        var request = PHAssetCreationRequest.CreationRequestForAsset();
                        request.AddResource(PHAssetResourceType.Photo, imageData, null);
                    }, (success, err) => {
                        if (!success) {
                            Console.WriteLine("Error occurred while saving image to photo library: {0}", err);
                        }
                    });
                } else {
                    var temporaryFileUrl = new NSUrl(GetTmpFilePath("jpg"), false);
                    PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() => {
                        NSError error = null;
                        if (imageData.Save(temporaryFileUrl, NSDataWritingOptions.Atomic, out error)) {
                            PHAssetChangeRequest.FromImage(temporaryFileUrl);
                        } else {
                            // FIX: corrected misspelled "occured" in the error message.
                            Console.WriteLine("Error occurred while writing image data to a temporary file: {0}", error);
                        }
                    }, (success, error) => {
                        if (!success) {
                            Console.WriteLine("Error occurred while saving image to photo library: {0}", error);
                        }
                        // Delete the temporary file (best-effort; a delete failure is ignored).
                        NSError deleteError;
                        NSFileManager.DefaultManager.Remove(temporaryFileUrl, out deleteError);
                    });
                }
            });
        } catch (NSErrorException ex) {
            Console.WriteLine("Could not capture still image: {0}", ex.Error);
        }
    });
}