/// <summary>
/// Captures a still image from the camera, crops it according to
/// <paramref name="cropRatios"/>, rotates it for the landscape-locked UI and
/// returns the result as a PNG byte array.
/// </summary>
/// <param name="cropRatios">Fractions (0..1) of the image to trim from the
/// left, top, right and bottom edges.</param>
/// <returns>The cropped, rotated image encoded as PNG bytes.</returns>
public async Task<Byte[]> CapturePhoto(CropRatios cropRatios)
{
    var videoConnection = StillImageOutput.ConnectionFromMediaType(AVMediaType.Video);

    // Dispose the native sample buffer and JPEG NSData as soon as the UIImage
    // has been constructed from them, so each capture doesn't leak native
    // memory. NOTE(review): UIImage is expected to retain the underlying
    // data itself, so releasing our managed references here is safe — confirm.
    UIImage imageInfo;
    using (var sampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync(videoConnection))
    using (var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer))
    {
        imageInfo = new UIImage(jpegImageAsNsData);
    }

    // Orientation is currently locked to landscape. If device-orientation
    // support is re-enabled, map AVCaptureVideoOrientation to
    // UIImageOrientation before cropping: Portrait -> Right,
    // LandscapeLeft -> Down, LandscapeRight -> no rotation,
    // PortraitUpsideDown -> Left (see source history for the original switch).

    // Build the crop rectangle from the edge ratios, in image coordinates.
    nfloat cropPhotoX = (nfloat)(cropRatios.LeftRatio * imageInfo.Size.Width);
    nfloat cropPhotoY = (nfloat)(cropRatios.TopRatio * imageInfo.Size.Height);
    nfloat cropPhotoWidth = (nfloat)(imageInfo.Size.Width * (1 - (cropRatios.LeftRatio + cropRatios.RightRatio)));
    nfloat cropPhotoHeight = (nfloat)(imageInfo.Size.Height * (1 - (cropRatios.TopRatio + cropRatios.BottomRatio)));
    var croppedImage = CropImage(imageInfo, cropPhotoX, cropPhotoY, cropPhotoWidth, cropPhotoHeight);

    // Rotate after cropping since we are locking orientation to landscape.
    // Otherwise this call should be removed.
    var rotatedImage = ScaleAndRotateImage(croppedImage, UIImageOrientation.Left);

    // Copy the PNG bytes into a managed array and release the native buffer.
    Byte[] imageByteArray;
    using (Foundation.NSData imageData = rotatedImage.AsPNG())
    {
        imageByteArray = new Byte[imageData.Length];
        System.Runtime.InteropServices.Marshal.Copy(imageData.Bytes, imageByteArray, 0, Convert.ToInt32(imageData.Length));
    }

    return imageByteArray;
}
/// <summary>
/// Captures a still image from the camera and returns its JPEG representation
/// as a stream. The underlying NSData is intentionally left alive: the
/// returned stream reads directly from it.
/// </summary>
/// <returns>A stream over the captured JPEG data.</returns>
public async Task<Stream> CapturePhoto()
{
    var connection = StillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
    var capturedBuffer = await StillImageOutput.CaptureStillImageTaskAsync(connection);
    var jpegData = AVCaptureStillImageOutput.JpegStillToNSData(capturedBuffer);
    return jpegData.AsStream();
}
// Registers KVO and notification-center observers for the capture pipeline.
// Each registration token is stored in a field so it can be removed later
// (removal is not visible in this chunk).
void AddObservers()
{
    // KVO: session running state, still-capture in flight, movie recording.
    runningObserver = Session.AddObserver("running", NSKeyValueObservingOptions.New, OnSessionRunningChanged);
    capturingStillObserver = StillImageOutput.AddObserver("capturingStillImage", NSKeyValueObservingOptions.New, OnCapturingStillImageChanged);
    recordingObserver = MovieFileOutput.AddObserver("recording", NSKeyValueObservingOptions.New, OnRecordingChanged);

    // Refocus/re-expose when the subject area changes on the active camera.
    subjectSubscriber = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureDevice.SubjectAreaDidChangeNotification, SubjectAreaDidChange, VideoDeviceInput.Device);
    // Recover from session runtime errors (e.g. media services reset).
    runtimeErrorObserver = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureSession.RuntimeErrorNotification, SessionRuntimeError, Session);

    // A session can only run when the app is full screen. It will be interrupted in a multi-app layout, introduced in iOS 9.
    // Add observers to handle these session interruptions
    // and show a preview is paused message. See the documentation of AVCaptureSession.WasInterruptedNotification for other
    // interruption reasons.
    interuptionObserver = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureSession.WasInterruptedNotification, SessionWasInterrupted, Session);
    interuptionEndedObserver = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureSession.InterruptionEndedNotification, SessionInterruptionEnded, Session);
}
// Captures a still image on the session queue and saves it to the user's
// photo library (preserving JPEG metadata). The sender parameter is unused
// here; the method runs entirely on SessionQueue via an async closure.
void SnapStillImage(CameraViewController sender)
{
    SessionQueue.DispatchAsync(async() =>
    {
        AVCaptureConnection connection = StillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
        var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;

        // Update the orientation on the still image output video connection before capturing.
        connection.VideoOrientation = previewLayer.Connection.VideoOrientation;

        // Flash set to Auto for Still Capture.
        SetFlashModeForDevice(AVCaptureFlashMode.Auto, VideoDeviceInput.Device);

        // Capture a still image.
        try
        {
            var imageDataSampleBuffer = await StillImageOutput.CaptureStillImageTaskAsync(connection);
            // The sample buffer is not retained. Create image data before saving the still image to the photo library asynchronously.
            NSData imageData = AVCaptureStillImageOutput.JpegStillToNSData(imageDataSampleBuffer);

            PHPhotoLibrary.RequestAuthorization(status =>
            {
                if (status == PHAuthorizationStatus.Authorized)
                {
                    // To preserve the metadata, we create an asset from the JPEG NSData representation.
                    // Note that creating an asset from a UIImage discards the metadata.
                    // In iOS 9, we can use AddResource method on PHAssetCreationRequest class.
                    // In iOS 8, we save the image to a temporary file and use +[PHAssetChangeRequest creationRequestForAssetFromImageAtFileURL:].
                    if (UIDevice.CurrentDevice.CheckSystemVersion(9, 0))
                    {
                        // iOS 9+ path: add the JPEG bytes directly as an asset resource.
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                        {
                            var request = PHAssetCreationRequest.CreationRequestForAsset();
                            request.AddResource(PHAssetResourceType.Photo, imageData, null);
                        }, (success, err) =>
                        {
                            if (!success)
                            {
                                Console.WriteLine("Error occurred while saving image to photo library: {0}", err);
                            }
                        });
                    }
                    else
                    {
                        // iOS 8 fallback: write the JPEG to a temp file, create the
                        // asset from the file URL, then delete the temp file.
                        var temporaryFileUrl = new NSUrl(GetTmpFilePath("jpg"), false);
                        PHPhotoLibrary.SharedPhotoLibrary.PerformChanges(() =>
                        {
                            NSError error = null;
                            if (imageData.Save(temporaryFileUrl, NSDataWritingOptions.Atomic, out error))
                            {
                                PHAssetChangeRequest.FromImage(temporaryFileUrl);
                            }
                            else
                            {
                                Console.WriteLine("Error occured while writing image data to a temporary file: {0}", error);
                            }
                        }, (success, error) =>
                        {
                            if (!success)
                            {
                                Console.WriteLine("Error occurred while saving image to photo library: {0}", error);
                            }

                            // Delete the temporary file.
                            NSError deleteError;
                            NSFileManager.DefaultManager.Remove(temporaryFileUrl, out deleteError);
                        });
                    }
                }
            });
        }
        catch (NSErrorException ex)
        {
            Console.WriteLine("Could not capture still image: {0}", ex.Error);
        }
    });
}