// Lazily creates the shared AVCaptureSession and wires it up: session
// notifications, a will-enter-foreground observer, the default video camera
// as input, and a metadata output (delegate on a private dispatch queue).
void setupCaptureSession () {
	// Already configured — nothing to do.
	if (CaptureSession != null)
		return;

	CaptureSession = new AVCaptureSession ();
	// Observe notifications posted by this session object.
	NSNotificationCenter.DefaultCenter.AddObserver (null, captureSessionNotification, CaptureSession);
	applicationWillEnterForegroundNotificationObserver =
		NSNotificationCenter.DefaultCenter.AddObserver (UIApplication.WillEnterForegroundNotification.ToString (),
			UIApplication.SharedApplication, NSOperationQueue.CurrentQueue,
			delegate(NSNotification notification) {
				applicationWillEnterForeground ();
			});

	videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);

	NSError error;
	// NOTE(review): 'error' is never inspected — a failed input creation is
	// silently ignored here; confirm whether that is intentional.
	videoInput = new AVCaptureDeviceInput (videoDevice, out error);
	if (CaptureSession.CanAddInput (videoInput))
		CaptureSession.AddInput (videoInput);

	metadataOutput = new AVCaptureMetadataOutput ();
	// Metadata callbacks are delivered on this private serial queue.
	var metadataQueue = new DispatchQueue ("com.AVCam.metadata");
	metadataObjectsDelegate = new MetadataObjectsDelegate {
		DidOutputMetadataObjectsAction = DidOutputMetadataObjects
	};
	metadataOutput.SetDelegate (metadataObjectsDelegate, metadataQueue);
	if (CaptureSession.CanAddOutput (metadataOutput))
		CaptureSession.AddOutput (metadataOutput);
}
// Enables continuous auto-focus, auto-exposure and auto-white-balance on the
// device, each where supported. The device must be locked for configuration
// around every change; the lock's bool result is honoured.
void ConfigureCameraForDevice (AVCaptureDevice device) {
	NSError error;
	// Fix: these three features were previously chained with 'else if', which
	// skipped exposure/white-balance configuration whenever an earlier feature
	// was supported. They are independent capabilities and are now configured
	// independently (matching the other ConfigureCameraForDevice in this file).
	if (device.IsFocusModeSupported (AVCaptureFocusMode.ContinuousAutoFocus)) {
		if (device.LockForConfiguration (out error)) {
			device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
			device.UnlockForConfiguration ();
		}
	}
	if (device.IsExposureModeSupported (AVCaptureExposureMode.ContinuousAutoExposure)) {
		if (device.LockForConfiguration (out error)) {
			device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
			device.UnlockForConfiguration ();
		}
	}
	if (device.IsWhiteBalanceModeSupported (AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance)) {
		if (device.LockForConfiguration (out error)) {
			device.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
			device.UnlockForConfiguration ();
		}
	}
}
// Acquires the default video capture device and caps its frame rate at 30 fps.
// Throws InvalidOperationException when no camera exists (e.g. the simulator)
// or when the device cannot be locked for configuration.
void CreateDevice () {
	NSError error;
	device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (device == null) {
		throw new InvalidOperationException ("No default video device");
	}
	// Fix: use the bool result of LockForConfiguration — previously only the
	// out error was checked, which is not a reliable success indicator.
	if (!device.LockForConfiguration (out error)) {
		throw new InvalidOperationException ("Could not configure. Error: " + error);
	}
	try {
		// CMTime (1, 30): deliver at most one frame every 1/30 s.
		device.ActiveVideoMinFrameDuration = new CMTime (1, 30);
	} finally {
		// Always release the configuration lock once it was taken.
		device.UnlockForConfiguration ();
	}
}
/// <summary>
/// Picks the best available camera: the back dual camera, then the back
/// wide-angle camera, then the front wide-angle camera. Returns null when
/// none of them exist (e.g. on the simulator).
/// </summary>
private static AVCaptureDevice GetDefaultDevice()
{
    // ?? evaluates lazily, so each fallback is only queried when needed.
    return AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
        ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);
}
// Enables continuous auto-focus, auto-exposure and auto-white-balance on the
// device, each where supported, locking the device around every change.
public void ConfigureCameraForDevice(AVCaptureDevice device)
{
    NSError error;
    // Fix: previously chained with 'else if', which configured only the first
    // supported feature and skipped the rest; the three capabilities are
    // independent, so each gets its own check.
    if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        if (device.LockForConfiguration(out error))
        {
            device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            device.UnlockForConfiguration();
        }
    }
    if (device.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
    {
        if (device.LockForConfiguration(out error))
        {
            device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            device.UnlockForConfiguration();
        }
    }
    if (device.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
    {
        if (device.LockForConfiguration(out error))
        {
            device.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
            device.UnlockForConfiguration();
        }
    }
}
/// <summary>
/// Takes a picture with the device camera, requesting camera permission first
/// when it has not yet been determined.
/// </summary>
/// <param name="options">The storage options.</param>
/// <returns>
/// Task producing the captured media file, or null when camera access is
/// refused, denied, or restricted.
/// </returns>
/// <exception cref="NotSupportedException">
/// Thrown when photos or the camera are not supported on this device.
/// </exception>
public override async Task<MediaFile> TakePhotoAsync(CameraMediaStorageOptions options)
{
    // If photos are not supported
    if (!this.IsPhotosSupported)
    {
        throw new NotSupportedException();
    }

    // If camera is not supported
    if (!this.IsCameraAvailable)
    {
        throw new NotSupportedException();
    }

    // Ask for camera permission on first use. Fix: previously this blocked on
    // the request task with Wait()/.Result, which risks deadlocking the UI
    // thread's synchronization context; it is now awaited.
    var status = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    if (status == AVAuthorizationStatus.NotDetermined)
    {
        var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
        if (!granted)
        {
            return null;
        }
    }

    // If camera access is denied or restricted by policy
    if ((status == AVAuthorizationStatus.Denied) || (status == AVAuthorizationStatus.Restricted))
    {
        return null;
    }

    // Take the camera photo
    MediaPickerIOS.VerifyCameraOptions(options);
    return await this.GetMediaAsync(UIImagePickerControllerSourceType.Camera, MediaPickerIOS.TypeImage, options);
}
// Exercises CMFormatDescription.Create for H.264 video. The H.264 format
// description only becomes creatable once capture authorization is granted
// and a capture stack has been touched at least once, hence the dance below.
public void Video()
{
    TestRuntime.AssertSystemVersion(PlatformName.iOS, 7, 0, throwIfOtherPlatform: false);

    CMFormatDescriptionError fde;
    var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    switch (auth)
    {
    case AVAuthorizationStatus.Restricted:
    case AVAuthorizationStatus.Denied:
    case AVAuthorizationStatus.NotDetermined:
        // We can't test the below, since some other tests may have initialized
        // whatever we need for the API to work correctly.
        // Assert.Null (CMFormatDescription.Create (CMMediaType.Video, (uint) CMVideoCodecType.H264, out fde), "null ({0})", auth);
        // Assert.That (fde, Is.EqualTo (CMFormatDescriptionError.InvalidParameter), "CMFormatDescriptionError");
        break;

    case AVAuthorizationStatus.Authorized:
        // We can't test the below, since some other tests may have initialized
        // whatever we need for the API to work correctly.
        // Assert.Null (CMFormatDescription.Create (CMMediaType.Video, (uint) CMVideoCodecType.H264, out fde), "null (authorized)");
        // Assert.That (fde, Is.EqualTo (CMFormatDescriptionError.InvalidParameter), "CMFormatDescriptionError (authorized)");
        using (var captureSession = new AVCaptureSession())
        {
            using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video))
            {
                NSError error;
                using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error))
                {
                    // this seems to initialize something.
                }
            }
        }
        Assert.IsNotNull(CMFormatDescription.Create(CMMediaType.Video, (uint)CMVideoCodecType.H264, out fde), "not null (authorized)");
        Assert.That(fde, Is.EqualTo(CMFormatDescriptionError.None), "CMFormatDescriptionError #2 (authorized)");
        break;
    }
}
// Shows a pre-permission alert for camera or microphone access when the system
// authorization is still undetermined; otherwise reports the current state
// straight to the completion handler without any UI.
public void ShowAVPermissionsWithType(ClusterAVAuthorizationType mediaType, string requestTitle, string message, string denyButtonTitle, string grantButtonTitle, ClusterPrePermissionCompletionHandler completionHandler)
{
    // Fall back to a sensible default title per media type.
    if (requestTitle.Length == 0)
    {
        switch (mediaType)
        {
        case ClusterAVAuthorizationType.Camera:
            requestTitle = @"Access Camera?";
            break;

        default:
            requestTitle = @"Access Microphone?";
            break;
        }
    }
    denyButtonTitle = GetTitleForType(ClusterTitleType.Deny, denyButtonTitle);
    grantButtonTitle = GetTitleForType(ClusterTitleType.Request, grantButtonTitle);

    // Fix: query the authorization status of the media type actually being
    // requested — previously AVMediaType.Video was used even when asking for
    // microphone access, so a microphone prompt reflected camera state.
    var avMediaType = (mediaType == ClusterAVAuthorizationType.Camera) ? AVMediaType.Video : AVMediaType.Audio;
    AVAuthorizationStatus status = AVCaptureDevice.GetAuthorizationStatus(avMediaType);
    if (status == AVAuthorizationStatus.NotDetermined)
    {
        _avPermissionCompletionHandler = completionHandler;
        _preAVPermissionAlertView = new UIAlertView(requestTitle, message, new AlertViewDelegate(this), denyButtonTitle, grantButtonTitle);
        // Stash the media type on the alert so the delegate knows what to request.
        _preAVPermissionAlertView.Tag = (nint)(int)mediaType;
        _preAVPermissionAlertView.Show();
    }
    else
    {
        // Permission already decided: report the current state, no action taken.
        if (completionHandler != null)
        {
            completionHandler((status == AVAuthorizationStatus.Authorized), ClusterDialogResult.NoActionTaken, ClusterDialogResult.NoActionTaken);
        }
    }
}
// Builds the photo-capture pipeline for the selected camera (front/back),
// attaches a full-bounds preview layer to this view, and starts the session.
// Silently no-ops when no matching camera exists (e.g. the simulator).
void Initialize()
{
    CaptureSession = new AVCaptureSession();
    CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    if (error != null)
    {
        // Fix: the input-creation error was previously ignored and a broken
        // input could be added to the session.
        Console.WriteLine("Could not create video input: " + error.LocalizedDescription);
        return;
    }

    // Fix: the JPEG codec dictionary was built but never used — OutputSettings
    // was assigned a fresh empty NSDictionary instead of this one.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    CaptureOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };

    CaptureSession.AddOutput(CaptureOutput);
    CaptureSession.AddInput(input);
    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();
    IsPreviewing = true;
}
// Builds a basic live-preview pipeline for the selected camera (front/back)
// and starts it. Silently no-ops when no matching camera exists.
void Initialize()
{
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    if (error != null)
    {
        // Fix: the input-creation error was previously ignored and a broken
        // input could be added to the session.
        Console.WriteLine("Could not create video input: " + error.LocalizedDescription);
        return;
    }

    CaptureSession.AddInput(input);
    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();
    IsPreviewing = true;
}
// Configures the barcode-scanning session: default camera as input, a preview
// layer inside vie_Preview_cam, and a QR-code metadata output whose callbacks
// are delivered on the main queue.
void setupCaptureSession()
{
    // Default video capture device used for scanning.
    var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
    // Enable continuous auto-focus/exposure/white-balance where supported.
    ConfigureCameraForDevice(captureDevice);
    // Wrap the device in an input node and attach it to the session.
    var captureInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    session.AddInput(captureInput);

    // Preview layer mirrors the camera feed inside the vie_Preview_cam view.
    var previewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = vie_Preview_cam.Frame;
    vie_Preview_cam.Layer.AddSublayer(previewLayer);

    // This class is the metadata delegate; callbacks arrive on the main queue.
    var metadataoutput = new AVCaptureMetadataOutput();
    metadataoutput.SetDelegate(this, CoreFoundation.DispatchQueue.MainQueue);
    session.AddOutput(metadataoutput);
    // Restrict recognition to QR codes (set after AddOutput, while the output
    // is attached to the session).
    metadataoutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

    session.StartRunning();
}
// Starts a live camera preview inside liveCameraStream and prepares a JPEG
// still-image output for capturing photos.
public void SetupLiveCameraStream()
{
    captureSession = new AVCaptureSession();

    var viewLayer = liveCameraStream.Layer;
    Console.WriteLine(viewLayer.Frame.Width);
    var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = liveCameraStream.Bounds
    };
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);
    Console.WriteLine(liveCameraStream.Layer.Frame.Width);

    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

    // Fix: the JPEG codec dictionary was built but never used — OutputSettings
    // was assigned a fresh empty NSDictionary instead of this one.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    stillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = dictionary
    };

    captureSession.AddOutput(stillImageOutput);
    captureSession.AddInput(captureDeviceInput);
    captureSession.StartRunning();
    ViewWillLayoutSubviews();
}
// Sets up a front-camera preview layer covering the whole view plus a JPEG
// still-image output; logs and no-ops on the simulator (no camera / no input).
void InitializeCameraLayer()
{
    this.captureSession = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.PresetMedium // TODO investigate that
    };
    var captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video).Where(dev => dev.Position == AVCaptureDevicePosition.Front).FirstOrDefault();
    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        return;
    }
    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        return;
    }
    this.captureSession.AddInput(input);

    // set up the output
    output = new AVCaptureStillImageOutput();
    var dict = new NSMutableDictionary();
    dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    // Fix: the JPEG settings dictionary was built but never attached to the
    // output, leaving the output unconfigured.
    output.OutputSettings = dict;
    captureSession.AddOutput(output);

    this.previewLayer = AVCaptureVideoPreviewLayer.FromSession(this.captureSession);
    this.previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    this.previewLayer.Frame = this.View.Frame;
    this.captureSession.StartRunning();
    this.cameraInitialized = true;
}
// App launch entry point: bootstraps the MvvmCross presenter/setup chain and
// shows the main window. Always returns true (launch succeeded).
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    Window = new UIWindow(UIScreen.MainScreen.Bounds);

    var presenter = new MvxIosViewPresenter(this, Window);
    var setup = new Setup(this, presenter);
    setup.Initialize();

    var startup = Mvx.Resolve<IMvxAppStart>();
    startup.Start();

    Window.MakeKeyAndVisible();

    // Fix: removed an unused AVCaptureDevice.GetAuthorizationStatus call whose
    // result was never read; querying the status does not prompt the user, so
    // dropping it changes nothing observable.
    return true;
}
// Enables continuous auto-focus, auto-exposure and auto-white-balance on the
// device, each independently, when the hardware supports it.
void ConfigureCameraForDevice(AVCaptureDevice device)
{
    NSError error;
    if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        // Fix: honour LockForConfiguration's bool result instead of assuming
        // the lock succeeded (mutating an unlocked device throws).
        if (device.LockForConfiguration(out error))
        {
            device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            device.UnlockForConfiguration();
        }
    }
    if (device.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
    {
        if (device.LockForConfiguration(out error))
        {
            device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            device.UnlockForConfiguration();
        }
    }
    if (device.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
    {
        if (device.LockForConfiguration(out error))
        {
            device.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
            device.UnlockForConfiguration();
        }
    }
    // @todo AVCaptureDeviceFormat look into what are valid values for this.
}
/// <summary>
/// Creates the capture session and preview layer, attaches the default video
/// camera, and begins running. Any setup failure (e.g. no camera on the
/// simulator) is logged to the console and otherwise swallowed.
/// </summary>
public void Start()
{
    captureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };

    try
    {
        var camera = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
        var cameraInput = AVCaptureDeviceInput.FromDevice(camera);
        captureSession.AddInput(cameraInput);
        Layer.AddSublayer(previewLayer);
        captureSession.StartRunning();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
// Starts the capture pipeline once video permission is confirmed; triggers the
// system permission prompt when the status is still undetermined.
private void CheckVideoPermissionAndStart()
{
    AVFoundation.AVAuthorizationStatus authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    switch (authorizationStatus)
    {
    case AVAuthorizationStatus.NotDetermined:
        // First run: show the system permission dialog.
        // NOTE(review): this callback may not run on the main thread — updating
        // _label.Text / calling SetupCaptureSession here should likely be
        // marshalled to the main queue; confirm against the UI framework used.
        AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, delegate(bool granted)
        {
            if (granted)
            {
                SetupCaptureSession();
            }
            else
            {
                _label.Text = "Please grant Video Capture permission";
                //RenderImageMessage("Please grant Video Capture permission");
            }
        });
        break;

    case AVAuthorizationStatus.Authorized:
        // Already allowed: start immediately.
        SetupCaptureSession();
        break;

    case AVAuthorizationStatus.Denied:
    case AVAuthorizationStatus.Restricted:
        _label.Text = "Please grant Video Capture permission";
        //RenderImageMessage("Please grant Video Capture permission");
        break;

    default:
        break; //do nothing
    }
}
// Applies the requested flash mode when the device has a flash unit that
// supports it; logs and skips when the configuration lock cannot be taken.
static void SetFlashModeForDevice (AVCaptureFlashMode flashMode, AVCaptureDevice device)
{
	bool supported = device.HasFlash && device.IsFlashModeSupported (flashMode);
	if (!supported)
		return;

	NSError lockError;
	if (!device.LockForConfiguration (out lockError)) {
		Console.WriteLine ("Could not lock device for configuration: {0}", lockError);
		return;
	}

	device.FlashMode = flashMode;
	device.UnlockForConfiguration ();
}
// Swaps the session's current camera input for the one pickCamera() selects,
// restoring the previous inputs when no camera is available. Runs inside a
// Begin/CommitConfiguration pair so the switch is applied atomically.
void updateCameraSelection()
{
	session.BeginConfiguration ();

	// Detach every existing input before choosing a replacement.
	AVCaptureInput[] oldInputs = session.Inputs;
	foreach (var oldInput in oldInputs)
		session.RemoveInput (oldInput);

	AVCaptureDeviceInput input = pickCamera ();
	if (input == null) {
		// No usable camera: put the original inputs back.
		foreach (var oldInput in oldInputs)
			session.AddInput (oldInput);
	} else {
		session.AddInput (input);
		device = input.Device;

		// The configuration lock is deliberately held after this method
		// returns; teardownAVCapture() calls UnlockForConfiguration to
		// release it.
		NSError error;
		if (!device.LockForConfiguration (out error))
			Console.WriteLine ("Could not lock for device: " + error.LocalizedDescription);

		updateAVFoundationFaceDetection ();
	}

	session.CommitConfiguration ();
}
/// <summary>Returns how many video-capable capture devices are present.</summary>
public int GetNumberOfVideoCameras()
{
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    return videoDevices.Length;
}
/// <summary>
/// Requests the permissions from the user.
/// </summary>
/// <returns>The requested permissions mapped to their resulting status.</returns>
/// <param name="permissions">Permissions to request.</param>
public async Task<Dictionary<Permission, PermissionStatus>> RequestPermissionsAsync(params Permission[] permissions)
{
    var results = new Dictionary<Permission, PermissionStatus>();
    foreach (var permission in permissions)
    {
        // Skip duplicates in the incoming list.
        if (results.ContainsKey(permission))
        {
            continue;
        }
        switch (permission)
        {
        case Permission.Calendar:
            results.Add(permission, await RequestEventPermission(EKEntityType.Event).ConfigureAwait(false));
            break;

        case Permission.Camera:
            try
            {
                // Camera permission maps to AVFoundation video authorization.
                var authCamera = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video).ConfigureAwait(false);
                results.Add(permission, (authCamera ? PermissionStatus.Granted : PermissionStatus.Denied));
            }
            catch (Exception ex)
            {
                // Best-effort: report Unknown rather than failing the batch.
                Debug.WriteLine("Unable to get camera permission: " + ex);
                results.Add(permission, PermissionStatus.Unknown);
            }
            break;

        case Permission.Contacts:
            results.Add(permission, await RequestContactsPermission().ConfigureAwait(false));
            break;

        case Permission.Location:
            results.Add(permission, await RequestLocationPermission().ConfigureAwait(false));
            break;

        case Permission.Microphone:
            try
            {
                // Microphone permission maps to AVFoundation audio authorization.
                var authMic = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Audio).ConfigureAwait(false);
                results.Add(permission, (authMic ? PermissionStatus.Granted : PermissionStatus.Denied));
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Unable to get microphone permission: " + ex);
                results.Add(permission, PermissionStatus.Unknown);
            }
            break;

        case Permission.Photos:
            results.Add(permission, await RequestPhotosPermission().ConfigureAwait(false));
            break;

        case Permission.Reminders:
            results.Add(permission, await RequestEventPermission(EKEntityType.Reminder).ConfigureAwait(false));
            break;

        case Permission.Sensors:
            results.Add(permission, await RequestSensorsPermission().ConfigureAwait(false));
            break;
        }

        // Permissions not handled above have no iOS analogue and are
        // implicitly granted.
        if (!results.ContainsKey(permission))
        {
            results.Add(permission, PermissionStatus.Granted);
        }
    }
    return(results);
}
public FlashTorch()
{
	// Remember the first video device that has a torch, if any; the device
	// list itself can be null on hardware without cameras, hence the '?.'.
	var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
	this.torchDevice = videoDevices?.FirstOrDefault (d => d.HasTorch);
}
// Builds the scanner overlay: dimmed top/bottom bands, a red aiming line,
// top/bottom instruction labels, and (on the main thread) a toolbar with a
// Cancel button plus a Flash button when the camera has a torch.
private void Initialize()
{
    Opaque = false;
    BackgroundColor = UIColor.Clear;
    //Add(_mainView);

    // Central "picture" frame: 90% of the width, 60% of the height, centred.
    var picFrameWidth = Math.Round(Frame.Width * 0.90); //screenFrame.Width;
    var picFrameHeight = Math.Round(Frame.Height * 0.60);
    var picFrameX = (Frame.Width - picFrameWidth) / 2;
    var picFrameY = (Frame.Height - picFrameHeight) / 2;
    var picFrame = new RectangleF((int)picFrameX, (int)picFrameY, (int)picFrameWidth, (int)picFrameHeight);

    //Setup Overlay
    var overlaySize = new SizeF(this.Frame.Width, this.Frame.Height - 44);

    topBg = new UIView(new RectangleF(0, 0, this.Frame.Width, (overlaySize.Height - picFrame.Height) / 2));
    // NOTE(review): the frame set in the constructor above is immediately
    // overwritten here, making the picFrame-based geometry dead — confirm
    // before cleaning up.
    topBg.Frame = new RectangleF(0, 0, this.Frame.Width, this.Frame.Height * 0.30f);
    topBg.BackgroundColor = UIColor.Black;
    topBg.Alpha = 0.6f;
    topBg.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleBottomMargin;

    bottomBg = new UIView(new RectangleF(0, topBg.Frame.Height + picFrame.Height, this.Frame.Width, topBg.Frame.Height));
    // Same pattern: constructor frame immediately replaced by a fixed layout.
    bottomBg.Frame = new RectangleF(0, this.Frame.Height * 0.70f, this.Frame.Width, this.Frame.Height * 0.30f);
    bottomBg.BackgroundColor = UIColor.Black;
    bottomBg.Alpha = 0.6f;
    bottomBg.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleTopMargin;

    // Horizontal red aiming line across the vertical centre of the view.
    var redLine = new UIView(new RectangleF(0, this.Frame.Height * 0.5f - 2.0f, this.Frame.Width, 4.0f));
    redLine.BackgroundColor = UIColor.Red;
    redLine.Alpha = 0.4f;
    redLine.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleBottomMargin | UIViewAutoresizing.FlexibleTopMargin;

    this.AddSubview(redLine);
    this.AddSubview(topBg);
    this.AddSubview(bottomBg);

    textTop = new UILabel()
    {
        Frame = new RectangleF(0, this.Frame.Height * 0.10f, this.Frame.Width, 42),
        Text = Scanner.TopText,
        Font = UIFont.SystemFontOfSize(13),
        TextAlignment = UITextAlignment.Center,
        TextColor = UIColor.White,
        Lines = 2,
        BackgroundColor = UIColor.Clear
    };
    this.AddSubview(textTop);

    textBottom = new UILabel()
    {
        Frame = new RectangleF(0, this.Frame.Height * 0.825f - 32f, this.Frame.Width, 64),
        Text = Scanner.BottomText,
        Font = UIFont.SystemFontOfSize(13),
        TextAlignment = UITextAlignment.Center,
        TextColor = UIColor.White,
        Lines = 3,
        BackgroundColor = UIColor.Clear
    };
    this.AddSubview(textBottom);

    // Only offer a flash toggle when the camera actually has a torch.
    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    bool hasTorch = false;
    if (captureDevice != null)
    {
        hasTorch = captureDevice.TorchAvailable;
    }

    InvokeOnMainThread(delegate
    {
        // Setting tool bar
        var toolBar = new UIToolbar(new RectangleF(0, Frame.Height - 44, Frame.Width, 44));
        var buttons = new List<UIBarButtonItem>();
        buttons.Add(new UIBarButtonItem(Scanner.CancelButtonText, UIBarButtonItemStyle.Done, delegate { OnCancel(); }));
        if (hasTorch)
        {
            buttons.Add(new UIBarButtonItem(UIBarButtonSystemItem.FlexibleSpace));
            buttons.Add(new UIBarButtonItem(Scanner.FlashButtonText, UIBarButtonItemStyle.Done, delegate { OnTorch(); }));
        }
        toolBar.Items = buttons.ToArray();
        toolBar.TintColor = UIColor.Black;
        toolBar.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleTopMargin;
        Add(toolBar);
    });
}
/// <summary>Current camera authorization status rendered as its enum name.</summary>
protected string CameraAccessStatus()
{
    var status = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    return status.ToString();
}
// Call this on the session queue.
// Configures the capture session: photo preset, back camera (dual > wide >
// front-wide fallback) as video input, an optional audio input, and an
// AVCapturePhotoOutput with high-resolution and (where supported) Live Photo
// capture. On any hard failure, setupResult becomes SessionConfigurationFailed
// and configuration is committed and aborted.
void ConfigureSession()
{
    if (setupResult != AVCamSetupResult.Success)
    {
        return;
    }

    NSError error = null;

    session.BeginConfiguration();

    /*
     * We do not create an AVCaptureMovieFileOutput when setting up the session because the
     * AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
     */
    session.SessionPreset = AVCaptureSession.PresetPhoto;

    // Add video input.
    // Choose the back dual camera if available, otherwise default to a wide angle camera.
    var videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);
    if (videoDevice == null)
    {
        // If the back dual camera is not available, default to the back wide angle camera.
        videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

        // In some cases where users break their phones, the back wide angle camera is not
        // available. In this case, we should default to the front wide angle camera.
        if (videoDevice == null)
        {
            videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);
        }
    }

    var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
    if (lVideoDeviceInput == null)
    {
        Console.WriteLine($"Could not create video device input: {error}");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }
    if (session.CanAddInput(lVideoDeviceInput))
    {
        session.AddInput(lVideoDeviceInput);
        videoDeviceInput = lVideoDeviceInput;

        DispatchQueue.MainQueue.DispatchAsync(() =>
        {
            /*
             * Why are we dispatching this to the main queue?
             * Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView
             * can only be manipulated on the main thread.
             * Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
             * on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
             *
             * Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
             * handled by -[AVCamCameraViewController viewWillTransitionToSize:withTransitionCoordinator:].
             */
            var statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
            var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
            if (statusBarOrientation != UIInterfaceOrientation.Unknown)
            {
                initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
            }
            VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
        });
    }
    else
    {
        Console.WriteLine(@"Could not add video device input to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    // Add audio input. Audio is optional: failure to create it only logs and
    // the session continues with video alone.
    var audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
    var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
    if (audioDeviceInput == null)
    {
        Console.WriteLine($"Could not create audio device input: {error}");
    }
    if (session.CanAddInput(audioDeviceInput))
    {
        session.AddInput(audioDeviceInput);
    }
    else
    {
        Console.WriteLine(@"Could not add audio device input to the session");
    }

    // Add photo output.
    var lPhotoOutput = new AVCapturePhotoOutput();
    if (session.CanAddOutput(lPhotoOutput))
    {
        session.AddOutput(lPhotoOutput);
        photoOutput = lPhotoOutput;

        photoOutput.IsHighResolutionCaptureEnabled = true;
        // Live Photo capture only where the hardware supports it.
        photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
        //photoOutput.IsDepthDataDeliveryEnabled(photoOutput.IsDepthDataDeliverySupported());
        livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? AVCamLivePhotoMode.On : AVCamLivePhotoMode.Off;
        //depthDataDeliveryMode = photoOutput.IsDepthDataDeliverySupported() ? AVCamDepthDataDeliveryMode.On : AVCamDepthDataDeliveryMode.Off;

        inProgressPhotoCaptureDelegates = new Dictionary<long, AVCamPhotoCaptureDelegate>();
        inProgressLivePhotoCapturesCount = 0;
    }
    else
    {
        Console.WriteLine(@"Could not add photo output to the session");
        setupResult = AVCamSetupResult.SessionConfigurationFailed;
        session.CommitConfiguration();
        return;
    }

    backgroundRecordingId = UIApplication.BackgroundTaskInvalid;

    session.CommitConfiguration();
}
// Builds a medium-resolution capture session whose video frames are fed to a
// ZXing barcode scanner on a background queue. Returns false when no camera
// input is available (e.g. the simulator).
bool SetupCaptureSession ()
{
	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	// create a device input and attach it to the session
	captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null){
		// No input device
		return false;
	}
	session.AddInput (input);

	// create a VideoDataOutput and add it to the session; 32-bit BGRA frames
	// are what the ZXing luminance source consumes.
	var output = new AVCaptureVideoDataOutput () {
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
	};

	// deliver sample buffers to the scanner on a dedicated serial queue so the
	// main thread is never blocked by decoding
	queue = new DispatchQueue ("myQueue");
	scanner = new ZxingScanner (this);
	output.SetSampleBufferDelegateAndQueue (scanner, queue);
	session.AddOutput (output);

	previewLayer = new AVCaptureVideoPreviewLayer (session);
	previewLayer.Orientation = AVCaptureVideoOrientation.Portrait;
	previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill";

	session.StartRunning ();
	return true;
}
// Builds the video+audio recording pipeline: capture session, device inputs,
// a full-screen preview layer sitting behind the other UI controls, and a
// movie-file output with a max duration. Shows lblError and bails out when no
// capture devices exist (always the case on the simulator).
public override void ViewDidLoad() {
	base.ViewDidLoad ();
	weAreRecording = false;
	lblError.Hidden = true;

	btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

	//Set up session
	session = new AVCaptureSession ();

	//Set up inputs and add them to the session
	//this will only work if using a physical device!
	Console.WriteLine ("getting device inputs");
	try{
		//add video capture device
		device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
		input = AVCaptureDeviceInput.FromDevice (device);
		session.AddInput (input);

		//add audio capture device
		audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
		audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
		session.AddInput(audioInput);
	}
	catch(Exception ex){
		//Fix: log the failure instead of swallowing it silently. This path is
		//always taken on the simulator, which has no capture devices.
		Console.WriteLine ("Failed to set up capture devices: " + ex);
		lblError.Hidden = false;
		return;
	}

	//Set up preview layer (shows what the input device sees)
	Console.WriteLine ("setting up preview layer");
	previewlayer = new AVCaptureVideoPreviewLayer (session);
	previewlayer.Frame = this.View.Bounds;

	//this code makes UI controls sit on top of the preview layer! Allows you to
	//just place the controls in interface builder.
	//(Fix: the view was previously allocated twice in a row; the first
	//instance was discarded immediately.)
	UIView cameraView = new UIView ();
	cameraView.Layer.AddSublayer (previewlayer);
	this.View.AddSubview (cameraView);
	this.View.SendSubviewToBack (cameraView);

	Console.WriteLine ("Configuring output");
	output = new AVCaptureMovieFileOutput ();
	long totalSeconds = 10000;
	Int32 preferredTimeScale = 30;
	CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale);
	output.MinFreeDiskSpaceLimit = 1024 * 1024;
	output.MaxRecordedDuration = maxDuration;

	if (session.CanAddOutput (output)) {
		session.AddOutput (output);
	}

	session.SessionPreset = AVCaptureSession.PresetMedium;

	Console.WriteLine ("About to start running session");
	session.StartRunning ();

	//toggle recording button was pushed.
	btnStartRecording.TouchUpInside += startStopPushed;
	//Console.ReadLine ();
}
// Convenience/binding-style constructor: defers to the (device, out error)
// constructor, discarding any error into the shared 'globalerr' field.
// NOTE(review): the 'handle' parameter is unused and the resulting error is
// never inspected — confirm whether callers still rely on this overload
// before cleaning it up.
public AVCaptureDeviceInput (AVCaptureDevice device, IntPtr handle) : this (device, out globalerr)
{
}
// Convenience wrapper over FromDevice (device, out error) for callers that do
// not need the failure reason.
static public AVCaptureDeviceInput FromDevice (AVCaptureDevice device)
{
	NSError ignoredError;
	var deviceInput = FromDevice (device, out ignoredError);
	return deviceInput;
}
// Toggles between the front and rear camera inputs. Only acts while no
// capture is in progress (TakePictureButton.Tag == 0); the swap happens inside
// a Begin/CommitConfiguration pair with the session stopped.
private void flipCamera()
{
    if (TakePictureButton.Tag != 0)
    {
        return;
    }

    CaptureSession.StopRunning();
    CaptureSession.BeginConfiguration();

    bool usingFrontCamera = fontBack == 1;
    if (usingFrontCamera)
    {
        // Switch front -> rear.
        fontBack = 0;
        CaptureSession.RemoveInput(frontCamera);
        CaptureSession.AddInput(rearCamera);
        currentDevice = inputDevice2;
    }
    else
    {
        // Switch rear -> front.
        fontBack = 1;
        CaptureSession.RemoveInput(rearCamera);
        CaptureSession.AddInput(frontCamera);
        currentDevice = inputDevice1;
    }

    CaptureSession.CommitConfiguration();
    CaptureSession.StartRunning();
}
// Tears down the capture pipeline: stops the session, removes face-detection
// wiring, releases the device configuration lock, drops the zoom KVO
// observers, and clears the device/session references.
void teardownAVCapture()
{
	session.StopRunning ();
	teardownAVFoundationFaceDetection ();

	// Balances the LockForConfiguration taken in updateCameraSelection().
	device.UnlockForConfiguration ();
	// Remove the KVO observers registered for zoom-state changes.
	device.RemoveObserver (this, (NSString) "videoZoomFactor");
	device.RemoveObserver (this, (NSString) "rampingVideoZoom");
	device = null;
	session = null;
}
// Builds the ZXing scanning pipeline: a 640x480 capture session, a preview layer
// hosted in 'layerView', and an AVCaptureVideoDataOutput whose frames are decoded by
// 'barcodeReader' on a dedicated dispatch queue via 'outputRecorder'.
// Returns false when no camera/input is available (e.g. on the simulator).
bool SetupCaptureSession()
{
	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession()
	{
		SessionPreset = AVCaptureSession.Preset640x480
	};

	// create a device input and attach it to the session
	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
	if (captureDevice == null)
	{
		Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
		if (overlayView != null)
		{
			this.AddSubview(overlayView);
			this.BringSubviewToFront(overlayView);
		}
		return(false);
	}
	var input = AVCaptureDeviceInput.FromDevice(captureDevice);
	if (input == null)
	{
		Console.WriteLine("No input - this won't work on the simulator, try a physical device");
		if (overlayView != null)
		{
			this.AddSubview(overlayView);
			this.BringSubviewToFront(overlayView);
		}
		return(false);
	}
	else
	{
		session.AddInput(input);
	}

	previewLayer = new AVCaptureVideoPreviewLayer(session);

	//Framerate set here (15 fps)
	if (previewLayer.RespondsToSelector(new Selector("connection")))
	{
		// NOTE(review): CMTime(1, 10) is 10 fps, not the 15 fps the comment above claims.
		previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
	}

	previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	previewLayer.Frame = this.Frame;
	previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

	// Host view for the preview layer; resizes with this view.
	layerView = new UIView(this.Frame);
	layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
	layerView.Layer.AddSublayer(previewLayer);

	this.AddSubview(layerView);

	ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

	if (overlayView != null)
	{
		this.AddSubview(overlayView);
		this.BringSubviewToFront(overlayView);
		//overlayView.LayoutSubviews ();
	}

	session.StartRunning();

	Console.WriteLine("RUNNING!!!");

	// create a VideoDataOutput and add it to the sesion
	output = new AVCaptureVideoDataOutput()
	{
		//videoSettings
		VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA),
	};

	// configure the output
	queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

	// The reader's luminance-source factory rotates frames to compensate for the
	// current interface orientation (unless ZXing's own auto-rotate is enabled).
	var barcodeReader = new BarcodeReader(null, (img) =>
	{
		var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height);

		//Don't try and rotate properly if we're autorotating anyway
		if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value)
		{
			return(src);
		}

		var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait;
		InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation);

		switch (tmpInterfaceOrientation)
		{
		// NOTE(review): Portrait and PortraitUpsideDown apply the identical triple
		// rotation — confirm upside-down should not differ (e.g. a single rotation).
		case UIInterfaceOrientation.Portrait:
			return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());

		case UIInterfaceOrientation.PortraitUpsideDown:
			return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());

		case UIInterfaceOrientation.LandscapeLeft:
			return(src);

		case UIInterfaceOrientation.LandscapeRight:
			return(src);
		}

		return(src);
	}, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

	// Copy the user-supplied scanning options onto the reader.
	if (ScanningOptions.TryHarder.HasValue)
	{
		Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value);
		barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value;
	}
	if (ScanningOptions.PureBarcode.HasValue)
	{
		barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value;
	}
	if (ScanningOptions.AutoRotate.HasValue)
	{
		Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value);
		barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value;
	}
	if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet))
	{
		barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet;
	}
	if (ScanningOptions.TryInverted.HasValue)
	{
		barcodeReader.TryInverted = ScanningOptions.TryInverted.Value;
	}

	if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
	{
		barcodeReader.Options.PossibleFormats = new List <BarcodeFormat>();

		foreach (var pf in ScanningOptions.PossibleFormats)
		{
			barcodeReader.Options.PossibleFormats.Add(pf);
		}
	}

	// Frame callback: decode each delivered image and forward hits to resultCallback.
	outputRecorder = new OutputRecorder(ScanningOptions, img =>
	{
		if (!IsAnalyzing)
		{
			return;
		}

		try
		{
			var started = DateTime.Now;
			var rs = barcodeReader.Decode(img);
			var total = DateTime.Now - started;

			Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms");

			if (rs != null)
			{
				resultCallback(rs);
			}
		}
		catch (Exception ex)
		{
			Console.WriteLine("DECODE FAILED: " + ex);
		}
	});

	output.AlwaysDiscardsLateVideoFrames = true;
	output.SetSampleBufferDelegate(outputRecorder, queue);

	Console.WriteLine("SetupCamera Finished");

	// NOTE(review): the output is added after StartRunning above — confirm this is
	// intentional rather than a leftover of the commented-out StartRunning below.
	session.AddOutput(output);
	//session.StartRunning ();

	// Enable continuous autofocus centered in the frame, when supported.
	if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
	{
		NSError err = null;
		if (captureDevice.LockForConfiguration(out err))
		{
			captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;

			if (captureDevice.FocusPointOfInterestSupported)
			{
				captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
			}

			captureDevice.UnlockForConfiguration();
		}
		else
		{
			Console.WriteLine("Failed to Lock for Config: " + err.Description);
		}
	}

	return(true);
}
// Builds the RosyWriter capture pipeline: one session with separate audio and video
// data outputs, each delivering sample buffers to its own GCD queue (this object is
// the sample-buffer delegate for both). Returns false when no audio device exists.
bool SetupCaptureSession()
{
	//Console.WriteLine ("SetupCaptureSession");
	// Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
	// is used to deliver both audio and video buffers, and our video processing consistently takes
	// too long, the delivery queue can back up, resulting in audio being dropped.
	//
	// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
	// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
	//
	// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

	// Create Capture session
	captureSession = new AVCaptureSession();
	captureSession.BeginConfiguration();

	// Create audio connection
	NSError error;
	var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

	if (audioDevice == null)
	{
		// NOTE(review): this early return leaves BeginConfiguration uncommitted.
		return(false); // e.g. simulator
	}
	var audioIn = new AVCaptureDeviceInput(audioDevice, out error);

	if (captureSession.CanAddInput(audioIn))
	{
		captureSession.AddInput(audioIn);
	}

	var audioOut = new AVCaptureAudioDataOutput();
	var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

	// Add the Delegate to capture each sample that comes through
	audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

	if (captureSession.CanAddOutput(audioOut))
	{
		captureSession.AddOutput(audioOut);
	}
	audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

	// Create Video Session
	var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
	var videoIn = new AVCaptureDeviceInput(videoDevice, out error);

	if (captureSession.CanAddInput(videoIn))
	{
		captureSession.AddInput(videoIn);
	}

	// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
	// processing can take longer than real-time on some platforms (such as iPhone 3GS).
	// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
	// alwaysDiscardsLateVideoFrames property to NO.
	var videoOut = new AVCaptureVideoDataOutput
	{
		AlwaysDiscardsLateVideoFrames = true,
		// HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
		// VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
		WeakVideoSettings = new CVPixelBufferAttributes()
		{
			PixelFormatType = CVPixelFormatType.CV32BGRA
		}.Dictionary
	};

	// Create a DispatchQueue for the Video Processing
	var videoCaptureQueue = new DispatchQueue("Video Capture Queue");
	videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

	if (captureSession.CanAddOutput(videoOut))
	{
		captureSession.AddOutput(videoOut);
	}

	// Set the Video connection from the Video Output object
	videoConnection = videoOut.ConnectionFromMediaType(AVMediaType.Video);
	videoOrientation = videoConnection.VideoOrientation;

	captureSession.CommitConfiguration();

	return(true);
}
// Switches the active video input between the front and back cameras, running on the
// session queue. Prefers the built-in dual camera on the back and the wide-angle
// camera on the front; falls back to any device at the desired position.
void ChangeCamera(NSObject sender)
{
	//CameraButton.Enabled = false;
	//RecordButton.Enabled = false;
	//PhotoButton.Enabled = false;
	//LivePhotoModeButton.Enabled = false;
	//CaptureModeControl.Enabled = false;

	sessionQueue.DispatchAsync(() =>
	{
		var currentVideoDevice = videoDeviceInput.Device;
		var currentPosition = currentVideoDevice.Position;

		AVCaptureDevicePosition preferredPosition = AVCaptureDevicePosition.Unspecified;
		AVCaptureDeviceType preferredDeviceType = AVCaptureDeviceType.BuiltInDualCamera;

		// Decide which camera to switch TO based on the one currently in use.
		switch (currentPosition)
		{
		//case AVCaptureDevicePosition.Unspecified:
		//preferredPosition = AVCaptureDevicePosition.Back;
		//preferredDeviceType = AVCaptureDeviceType.BuiltInDualCamera;
		//break;
		case AVCaptureDevicePosition.Unspecified:
		case AVCaptureDevicePosition.Front:
			preferredPosition = AVCaptureDevicePosition.Back;
			preferredDeviceType = AVCaptureDeviceType.BuiltInDualCamera;
			break;

		case AVCaptureDevicePosition.Back:
			preferredPosition = AVCaptureDevicePosition.Front;
			preferredDeviceType = AVCaptureDeviceType.BuiltInWideAngleCamera;
			break;
		}

		var devices = videoDeviceDiscoverySession.Devices;
		AVCaptureDevice newVideoDevice = null;

		// First, look for a device with both the preferred position and device type.
		foreach (var device in devices)
		{
			if (device.Position == preferredPosition && device.DeviceType.GetConstant() == preferredDeviceType.GetConstant())
			{
				newVideoDevice = device;
				break;
			}
		}

		// Otherwise, look for a device with only the preferred position.
		if (newVideoDevice == null)
		{
			foreach (var device in devices)
			{
				if (device.Position == preferredPosition)
				{
					newVideoDevice = device;
					break;
				}
			}
		}

		if (newVideoDevice != null)
		{
			var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(newVideoDevice);

			session.BeginConfiguration();

			// Remove the existing device input first, since using the front and back camera simultaneously is not supported.
			session.RemoveInput(videoDeviceInput);

			if (session.CanAddInput(lVideoDeviceInput))
			{
				// Re-register the subject-area-change observer against the new device.
				if (subjectAreaDidChangeObserver != null)
				{
					subjectAreaDidChangeObserver.Dispose();
				}
				subjectAreaDidChangeObserver = NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureDevice.SubjectAreaDidChangeNotification, SubjectAreaDidChange, newVideoDevice);
				session.AddInput(lVideoDeviceInput);
				videoDeviceInput = lVideoDeviceInput;
			}
			else
			{
				// Could not use the new input: restore the previous one.
				session.AddInput(videoDeviceInput);
			}

			// Re-apply stabilization preference, which resets when inputs change.
			if (movieFileOutput != null)
			{
				var movieFileOutputConnection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
				if (movieFileOutputConnection.SupportsVideoStabilization)
				{
					movieFileOutputConnection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
				}
			}

			/*
			 * Set Live Photo capture and depth data delivery if it is supported. When changing cameras, the
			 * `livePhotoCaptureEnabled` and `depthDataDeliveryEnabled` properties of the AVCapturePhotoOutput gets set to NO when
			 * a video device is disconnected from the session. After the new video device is
			 * added to the session, re-enable Live Photo capture and depth data delivery if they are supported.
			 */
			photoOutput.IsLivePhotoCaptureEnabled = photoOutput.IsLivePhotoCaptureSupported;
			//photoOutput.IsDepthDataDeliveryEnabled(photoOutput.IsDepthDataDeliverySupported());

			session.CommitConfiguration();
		}

		//DispatchQueue.MainQueue.DispatchAsync(() =>
		//{
		//	CameraButton.Enabled = true;
		//	RecordButton.Enabled = CaptureModeControl.SelectedSegment == (int)AVCamCaptureMode.Movie;
		//	PhotoButton.Enabled = true;
		//	LivePhotoModeButton.Enabled = true;
		//	CaptureModeControl.Enabled = true;
		//	DepthDataDeliveryButton.Enabled = photoOutput.IsDepthDataDeliveryEnabled();
		//	DepthDataDeliveryButton.Hidden = !photoOutput.IsDepthDataDeliverySupported();
		//});
	});
}
/// <summary>
/// Requests user access to the camera (video capture).
/// </summary>
/// <returns>True if access was granted, otherwise false.</returns>
public Task <bool> RequestAccessAsync() =>
	AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
// Sets up the rectangle-detection demo: a CIDetector tuned for A4-like rectangles, a
// photo-quality capture session feeding frames to VideoFrameSamplerDelegate on a GCD
// queue, a live preview layer, a fixed detection framerate, and the overlay/labels UI.
public override void ViewDidLoad()
{
	base.ViewDidLoad();

	this.View.BackgroundColor = UIColor.White;

	NSError error;

	// Setup detector options.
	var options = new CIDetectorOptions
	{
		Accuracy = FaceDetectorAccuracy.High,
		// Can give a hint here about the rects to detect. 1.4 would be for A4 sheets of paper for instance.
		AspectRatio = 1.41f,
	};

	// Create a rectangle detector. Note that you can also create QR detector or a face detector.
	// Most of this code will also work with other detectors (like streaming to a preview layer and grabbing images).
	this.detector = CIDetector.CreateRectangleDetector(context: null, detectorOptions: options);

	// Create the session. The AVCaptureSession is the managing instance of the whole video handling.
	var captureSession = new AVCaptureSession()
	{
		// Defines what quality we want to use for the images we grab. Photo gives highest resolutions.
		SessionPreset = AVCaptureSession.PresetPhoto
	};

	// Find a suitable AVCaptureDevice for video input.
	var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

	if (device == null)
	{
		// This will not work on the iOS Simulator - there is no camera. :-)
		throw new InvalidProgramException("Failed to get AVCaptureDevice for video input!");
	}

	// Create a device input with the device and add it to the session.
	var videoInput = AVCaptureDeviceInput.FromDevice(device, out error);

	if (videoInput == null)
	{
		throw new InvalidProgramException("Failed to get AVCaptureDeviceInput from AVCaptureDevice!");
	}

	// Let session read from the input, this is our source.
	captureSession.AddInput(videoInput);

	// Create output for the video stream. This is the destination.
	var videoOutput = new AVCaptureVideoDataOutput()
	{
		AlwaysDiscardsLateVideoFrames = true
	};

	// Define the video format we want to use. Note that Xamarin exposes the CompressedVideoSetting and UncompressedVideoSetting
	// properties on AVCaptureVideoDataOutput in the Unified API, but I could not get these to work. The VideoSettings property is deprecated,
	// so I use the WeakVideoSettings instead which takes an NSDictionary as input.
	this.videoSettingsDict = new NSMutableDictionary();
	this.videoSettingsDict.Add(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA));
	videoOutput.WeakVideoSettings = this.videoSettingsDict;

	// Create a delegate to report back to us when an image has been captured.
	// We want to grab the camera stream and feed it through a AVCaptureVideoDataOutputSampleBufferDelegate
	// which allows us to get notified if a new image is available. An implementation of that delegate is VideoFrameSamplerDelegate in this project.
	this.sampleBufferDelegate = new VideoFrameSamplerDelegate();

	// Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue.
	// This is pretty much like a system managed thread (see: http://zeroheroblog.com/ios/concurrency-in-ios-grand-central-dispatch-gcd-dispatch-queues).
	this.sessionQueue = new DispatchQueue("AVSessionQueue");

	// Assign the queue and the delegate to the output. Now all output will go through the delegate.
	videoOutput.SetSampleBufferDelegate(this.sampleBufferDelegate, this.sessionQueue);

	// Add output to session.
	captureSession.AddOutput(videoOutput);

	// We also want to visualize the input stream. The raw stream can be fed into an AVCaptureVideoPreviewLayer, which is a subclass of CALayer.
	// A CALayer can be added to a UIView. We add that layer to the controller's main view.
	var layer = this.View.Layer;
	this.videoLayer = AVCaptureVideoPreviewLayer.FromSession(captureSession);
	this.videoLayer.Frame = layer.Bounds;
	layer.AddSublayer(this.videoLayer);

	// All setup! Start capturing!
	captureSession.StartRunning();

	// This is just for information and allows you to get valid values for the detection framerate.
	Console.WriteLine("Available capture framerates:");
	var rateRanges = device.ActiveFormat.VideoSupportedFrameRateRanges;
	foreach (var r in rateRanges)
	{
		Console.WriteLine(r.MinFrameRate + "; " + r.MaxFrameRate + "; " + r.MinFrameDuration + "; " + r.MaxFrameDuration);
	}

	// Configure framerate. Kind of weird way of doing it but the only one that works.
	device.LockForConfiguration(out error);
	// CMTime constructor means: 1 = one second, DETECTION_FPS = how many samples per unit, which is 1 second in this case.
	device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS);
	device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS);
	device.UnlockForConfiguration();

	// Put a small image view at the top left that shows the live image with the detected rectangle(s).
	this.imageViewOverlay = new UIImageView
	{
		ContentMode = UIViewContentMode.ScaleAspectFit,
		BackgroundColor = UIColor.Gray
	};
	this.imageViewOverlay.Layer.BorderColor = UIColor.Red.CGColor;
	this.imageViewOverlay.Layer.BorderWidth = 3f;
	this.Add(this.imageViewOverlay);

	// Put another image view top right that shows the image with perspective correction.
	this.imageViewPerspective = new UIImageView
	{
		ContentMode = UIViewContentMode.ScaleAspectFit,
		BackgroundColor = UIColor.Gray
	};
	this.imageViewPerspective.Layer.BorderColor = UIColor.Red.CGColor;
	this.imageViewPerspective.Layer.BorderWidth = 3f;
	this.Add(this.imageViewPerspective);

	// Add some labels for information.
	this.mainWindowLbl = new UILabel
	{
		Text = "Live stream from camera. Point camera to a rectangular object.",
		TextAlignment = UITextAlignment.Center
	};
	this.Add(this.mainWindowLbl);

	this.detectionWindowLbl = new UILabel
	{
		Text = "Detected rectangle overlay",
		TextAlignment = UITextAlignment.Center
	};
	this.Add(this.detectionWindowLbl);

	this.perspectiveWindowLbl = new UILabel
	{
		Text = "Perspective corrected",
		TextAlignment = UITextAlignment.Center
	};
	this.Add(this.perspectiveWindowLbl);
}
/// <summary>
/// Returns the app's current camera (video) authorization status.
/// </summary>
private AVAuthorizationStatus GetAuthorizationStatus() =>
	AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
// Initializes the capture pipeline: preview layer, camera selection (front/back per
// cameraOptions), movie + photo outputs, frame-rate configuration, and — when
// SpeedOptions.SlowMo is requested — switching to a >= 240 fps capture format.
// Silently no-ops when no camera matches the requested position (e.g. simulator).
protected void Initialize()
{
	// configure the capture session for medium resolution, change this if your code
	// can cope with more data or volume
	CaptureSession = new AVCaptureSession
	{
		SessionPreset = AVCaptureSession.PresetMedium
	};
	previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
	{
		Frame = Bounds,
		VideoGravity = AVLayerVideoGravity.ResizeAspectFill
	};

	var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
	var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
	var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

	if (device == null)
	{
		return;
	}

	NSError error;
	var input = new AVCaptureDeviceInput(device, out error);

	movieFileOutput = new AVCaptureMovieFileOutput
	{
		// set max record time to 10 minutes
		MaxRecordedDuration = CMTime.FromSeconds(600, 1)
	};

	photoFileOutput = new AVCapturePhotoOutput();
	photoFileOutput.IsHighResolutionCaptureEnabled = true;

	if (CaptureSession.CanAddOutput(movieFileOutput))
	{
		CaptureSession.BeginConfiguration();
		CaptureSession.AddOutput(movieFileOutput);
		CaptureSession.AddOutput(photoFileOutput);

		// Pin the frame duration to the device's supported min/max frame rates.
		var ranges = device.ActiveFormat.VideoSupportedFrameRateRanges;
		if (device.LockForConfiguration(out error))
		{
			device.ActiveVideoMinFrameDuration = new CMTime(1, (int)ranges.First().MinFrameRate);
			device.ActiveVideoMaxFrameDuration = new CMTime(1, (int)ranges.First().MaxFrameRate);
			// BUGFIX: the configuration lock was never released here, which keeps the
			// device locked and blocks the slow-mo LockForConfiguration call below.
			device.UnlockForConfiguration();
		}

		var connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
		if (connection != null)
		{
			if (connection.SupportsVideoStabilization)
			{
				connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
			}
		}
		CaptureSession.CommitConfiguration();
	}

	CaptureSession.AddInput(input);
	Layer.AddSublayer(previewLayer);
	CaptureSession.StartRunning();

	// set frame rate if Slow-mo is requested
	if (speedOptions == SpeedOptions.SlowMo)
	{
		foreach (var vFormat in device.Formats)
		{
			var _ranges = vFormat.VideoSupportedFrameRateRanges as AVFrameRateRange[];
			var frameRates = _ranges[0];

			// Use the first format capable of at least 240 fps.
			if (frameRates.MaxFrameRate >= 240.0)
			{
				device.LockForConfiguration(out NSError _error);
				if (_error is null)
				{
					device.ActiveFormat = vFormat as AVCaptureDeviceFormat;
					device.ActiveVideoMinFrameDuration = frameRates.MinFrameDuration;
					device.ActiveVideoMaxFrameDuration = frameRates.MaxFrameDuration;
					device.UnlockForConfiguration();
					break;
				}
			}
		}
	}

	IsPreviewing = true;
}
/// <summary>
/// Creates a capture input for the device. The 'handle' argument is not used by the
/// body, and any NSError from the underlying overload is discarded.
/// </summary>
public static AVCaptureDeviceInput FromDevice(AVCaptureDevice device, IntPtr handle) =>
	FromDevice(device, out NSError _);
/// <summary>
/// Initializes a new instance of the CameraDevice class.
/// </summary>
internal CameraDevice()
{
	// Pick the unique back-facing capture device. Yields null when none exists;
	// SingleOrDefault throws if more than one device matches.
	this.device = AVCaptureDevice.Devices
		.SingleOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
}
// Attaches the configured (front or back) camera to the capture session.
// Returns true on success; otherwise false with a description in 'errorMessage'.
private bool addCameraInput( out string errorMessage )
{
	// Resolve which physical camera the configured camera type maps to.
	videoCaptureDevice = this.cameraType == CameraType.FrontFacing ? MediaDevices.FrontCamera : MediaDevices.BackCamera;
	videoInput = AVCaptureDeviceInput.FromDevice(videoCaptureDevice);

	if (videoInput != null)
	{
		session.AddInput (videoInput);
		errorMessage = "";
		return true;
	}

	errorMessage = "No video capture device";
	return false;
}
// Attaches the microphone to the capture session.
// Returns true on success; otherwise false with a description in 'errorMessage'.
private bool addAudioInput( out string errorMessage )
{
	audioCaptureDevice = MediaDevices.Microphone;
	audioInput = AVCaptureDeviceInput.FromDevice(audioCaptureDevice);

	if (audioInput != null)
	{
		session.AddInput (audioInput);
		errorMessage = "";
		return true;
	}

	errorMessage = "No audio capture device";
	return false;
}
// Configures the photo capture pipeline: discovers the front (inputDevice1) and back
// (inputDevice2) cameras, turns their flash off, attaches the rear camera input,
// installs a preview layer behind the UI, adds a JPEG still-image output, and starts
// the session.
private void SetupCamera()
{
	CaptureSession = null;
	CaptureSession = new AVCaptureSession();
	CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;

	currentDevice = null;
	inputDevice1 = null;
	inputDevice2 = null;

	foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
	{
		if (device.Position == AVCaptureDevicePosition.Front)
		{
			inputDevice1 = device;
		}
		else if (device.Position == AVCaptureDevicePosition.Back)
		{
			inputDevice2 = device;
		}
	}

	NSError error;
	// BUGFIX: guard against missing cameras (e.g. simulator, or devices without a
	// front camera) — previously inputDevice1/inputDevice2 were dereferenced
	// unconditionally and crashed with a NullReferenceException.
	// BUGFIX: release the configuration lock after changing the flash mode; it was
	// previously held forever.
	if (inputDevice1 != null && inputDevice1.HasFlash)
	{
		inputDevice1.LockForConfiguration(out error);
		inputDevice1.FlashMode = AVCaptureFlashMode.Off;
		inputDevice1.UnlockForConfiguration();
		FlashButton.TitleLabel.Text = "Flash Off";
	}
	if (inputDevice2 != null && inputDevice2.HasFlash)
	{
		inputDevice2.LockForConfiguration(out error);
		inputDevice2.FlashMode = AVCaptureFlashMode.Off;
		inputDevice2.UnlockForConfiguration();
		FlashButton.TitleLabel.Text = "Flash Off";
	}

	if (inputDevice1 != null)
	{
		frontCamera = AVCaptureDeviceInput.FromDevice(inputDevice1, out error);
	}
	if (inputDevice2 != null)
	{
		rearCamera = AVCaptureDeviceInput.FromDevice(inputDevice2, out error);
	}
	currentDevice = inputDevice2;

	// Start on the rear camera when it is available.
	if (rearCamera != null && CaptureSession.CanAddInput(rearCamera))
	{
		CaptureSession.AddInput(rearCamera);
	}

	// Insert the preview layer at index 0 so UI controls stay on top of it.
	AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession);
	previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	previewLayer.Frame = View.Frame;
	View.Layer.InsertSublayer(previewLayer, 0);

	// JPEG still-image output.
	StillImageOutput = new AVCaptureStillImageOutput();
	StillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG);
	CaptureSession.AddOutput(StillImageOutput);

	CaptureSession.StartRunning();
}
// Builds the AVFoundation-metadata barcode scanner: selects the front or back camera
// per options, picks the best supported session preset from a candidate table,
// attaches an AVCaptureMetadataOutput whose delegate converts machine-readable codes
// to ZXing results and forwards them to resultCallback, then configures the preview
// layer and focus/exposure/white-balance before starting the session.
// Returns false when no camera or input is available (e.g. on the simulator).
bool SetupCaptureSession()
{
	var availableResolutions = new List <CameraResolution> ();

	// Candidate presets with their nominal pixel sizes, used for preset selection below.
	var consideredResolutions = new Dictionary <NSString, CameraResolution> {
		{ AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
		{ AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360
		{ AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
		{ AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
		{ AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
	};

	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession()
	{
		SessionPreset = AVCaptureSession.Preset640x480
	};

	// create a device input and attach it to the session
	// var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	AVCaptureDevice captureDevice = null;
	var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

	// Walk the devices, stopping at the first one matching the requested position.
	foreach (var device in devices)
	{
		captureDevice = device;
		if (options.UseFrontCameraIfAvailable.HasValue &&
		    options.UseFrontCameraIfAvailable.Value &&
		    device.Position == AVCaptureDevicePosition.Front)
		{
			break; //Front camera successfully set
		}
		else if (device.Position == AVCaptureDevicePosition.Back && (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
		{
			break; //Back camera succesfully set
		}
	}
	if (captureDevice == null)
	{
		Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
		if (overlayView != null)
		{
			this.AddSubview(overlayView);
			this.BringSubviewToFront(overlayView);
		}
		return(false);
	}

	CameraResolution resolution = null;

	// Find resolution
	// Go through the resolutions we can even consider
	foreach (var cr in consideredResolutions)
	{
		// Now check to make sure our selected device supports the resolution
		// so we can add it to the list to pick from
		if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
		{
			availableResolutions.Add(cr.Value);
		}
	}

	resolution = options.GetResolution(availableResolutions);

	// See if the user selected a resolution
	if (resolution != null)
	{
		// Now get the preset string from the resolution chosen
		var preset = (from c in consideredResolutions
		              where c.Value.Width == resolution.Width &&
		              c.Value.Height == resolution.Height
		              select c.Key).FirstOrDefault();

		// If we found a matching preset, let's set it on the session
		if (!string.IsNullOrEmpty(preset))
		{
			session.SessionPreset = preset;
		}
	}

	var input = AVCaptureDeviceInput.FromDevice(captureDevice);

	if (input == null)
	{
		Console.WriteLine("No input - this won't work on the simulator, try a physical device");
		if (overlayView != null)
		{
			this.AddSubview(overlayView);
			this.BringSubviewToFront(overlayView);
		}
		return(false);
	}
	else
	{
		session.AddInput(input);
	}

	foundResult = false;
	//Detect barcodes with built in avcapture stuff
	AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput();

	// Delegate invoked for each batch of detected metadata objects; throttled by the
	// configured delays and the 'working' re-entrancy flag.
	captureDelegate = new CaptureDelegate(metaDataObjects =>
	{
		if (!analyzing)
		{
			return;
		}

		//Console.WriteLine("Found MetaData Objects");

		var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

		if (msSinceLastPreview < options.DelayBetweenAnalyzingFrames ||
		    (wasScanned && msSinceLastPreview < options.DelayBetweenContinuousScans) ||
		    working)
		//|| CancelTokenSource.IsCancellationRequested)
		{
			return;
		}

		working = true;
		wasScanned = false;
		lastAnalysis = DateTime.UtcNow;

		var mdo = metaDataObjects.FirstOrDefault();

		// NOTE(review): the two early returns below leave 'working' set to true,
		// which appears to suppress all subsequent callbacks — confirm intent.
		if (mdo == null)
		{
			return;
		}

		var readableObj = mdo as AVMetadataMachineReadableCodeObject;

		if (readableObj == null)
		{
			return;
		}

		wasScanned = true;

		// Translate the AVFoundation barcode type into ZXing's format enum and
		// surface the decoded string through the result callback.
		var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());

		var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);

		resultCallback(rs);

		working = false;
	});

	metadataOutput.SetDelegate(captureDelegate, DispatchQueue.MainQueue);
	session.AddOutput(metadataOutput);

	//Setup barcode formats
	if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
	{
#if __UNIFIED__
		// Unified API: formats are a flags enum; OR the requested ones together.
		var formats = AVMetadataObjectType.None;

		foreach (var f in ScanningOptions.PossibleFormats)
		{
			formats |= AVCaptureBarcodeFormatFromZXingBarcodeFormat(f);
		}

		formats &= ~AVMetadataObjectType.None;

		metadataOutput.MetadataObjectTypes = formats;
#else
		// Classic API: formats are NSString identifiers.
		var formats = new List <string> ();

		foreach (var f in ScanningOptions.PossibleFormats)
		{
			formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f));
		}

		metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray();
#endif
	}
	else
	{
		metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
	}

	previewLayer = new AVCaptureVideoPreviewLayer(session);

	//Framerate set here (15 fps)
	if (previewLayer.RespondsToSelector(new Selector("connection")))
	{
		if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
		{
			// iOS 7+: frame duration must be set on the device, not the connection.
			var perf1 = PerformanceCounter.Start();

			NSError lockForConfigErr = null;

			captureDevice.LockForConfiguration(out lockForConfigErr);

			if (lockForConfigErr == null)
			{
				// NOTE(review): CMTime(1, 10) is 10 fps, not the 15 fps claimed above.
				captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
				captureDevice.UnlockForConfiguration();
			}

			PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
		}
		else
		{
			previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
		}
	}

#if __UNIFIED__
	previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
	previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
	previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
	previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

	// Host view for the preview layer; resizes with this view.
	layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
	layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
	layerView.Layer.AddSublayer(previewLayer);

	this.AddSubview(layerView);

	ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

	if (overlayView != null)
	{
		this.AddSubview(overlayView);
		this.BringSubviewToFront(overlayView);
		//overlayView.LayoutSubviews ();
	}

	session.StartRunning();

	Console.WriteLine("RUNNING!!!");

	//output.AlwaysDiscardsLateVideoFrames = true;

	Console.WriteLine("SetupCamera Finished");

	//session.AddOutput (output);
	//session.StartRunning ();

	// Configure focus/exposure/white-balance, preferring the continuous modes and
	// falling back to the one-shot variants when unsupported.
	if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
	{
		NSError err = null;
		if (captureDevice.LockForConfiguration(out err))
		{
			if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
			{
				captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
			}
			else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
			{
				captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
			}

			if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
			{
				captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
			}
			else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
			{
				captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
			}

			if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
			{
				captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
			}
			else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
			{
				captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
			}

			// Restricting autofocus to the near range speeds up barcode focusing (iOS 7+).
			if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
			{
				captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
			}

			if (captureDevice.FocusPointOfInterestSupported)
			{
				captureDevice.FocusPointOfInterest = new CGPoint(0.5f, 0.5f);
			}

			if (captureDevice.ExposurePointOfInterestSupported)
			{
				captureDevice.ExposurePointOfInterest = new CGPoint(0.5f, 0.5f);
			}

			captureDevice.UnlockForConfiguration();
		}
		else
		{
			Console.WriteLine("Failed to Lock for Config: " + err.Description);
		}
	}

	return(true);
}
// Builds the recording session using the front-facing camera (when present) plus the
// default microphone. On any failure (always the case on the simulator) the method
// logs and returns, leaving the session without inputs.
private void initializeRecorder()
{
	//Initialize session
	session = new AVCaptureSession ();

	//get inputs and add them to the session.
	try
	{
		// Find the front-facing camera among all video-capable devices.
		foreach (AVCaptureDevice capdevice in AVCaptureDevice.Devices)
		{
			if (capdevice.HasMediaType(AVMediaType.Video))
			{
				if (capdevice.Position == AVCaptureDevicePosition.Front)
				{
					device = capdevice;
				}
			}
		}

		//device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
		input = AVCaptureDeviceInput.FromDevice(device);
		session.AddInput(input);

		//add audio capture device
		audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
		audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
		session.AddInput(audioInput);
	}
	catch (Exception ex)
	{
		// BUGFIX: the exception was previously swallowed unlogged; log it so setup
		// failures are diagnosable.
		Console.WriteLine ("initializeRecorder failed: " + ex);
		return;
	}
}
/// <summary>
/// Returns an audio device.
/// </summary>
/// <returns>
/// The first available audio capture device, or null when none exists.
/// </returns>
static AVCaptureDevice AudioDevice()
{
	var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Audio);
	if (devices.Length == 0)
	{
		return null;
	}
	return devices [0];
}
/// <summary>
/// Configures and starts a photo capture session on the back camera, then
/// prepares bracketed still-image capture, reporting the outcome through
/// <paramref name="completion"/> (via PrepareBracketsWithCompletionHandler).
/// On a missing or unopenable camera an error alert is shown and the
/// completion handler is not invoked.
/// </summary>
/// <param name="completion">Callback receiving success flag and any error.</param>
private void StartCameraWithCompletionHandler (Action<bool, NSError> completion)
{
	captureSession = new AVCaptureSession ();
	captureSession.BeginConfiguration ();

	captureDevice = CameraDeviceForPosition (AVCaptureDevicePosition.Back);
	if (captureDevice == null) {
		// Balance BeginConfiguration before bailing out so the session is not
		// left stuck in a pending configuration state.
		captureSession.CommitConfiguration ();
		string message = "Error message back camera - not found";
		string title = "Error";
		ShowErrorMessage (message, title);
		return;
	}

	NSError error;
	AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice (captureDevice, out error);
	if (deviceInput == null) {
		Console.WriteLine ("This error should be handled appropriately in your app -- obtain device input: {0}", error);
		captureSession.CommitConfiguration ();
		string message = "Error message back camera - can't open.";
		string title = "Error";
		ShowErrorMessage (message, title);
		return;
	}

	// Guard additions the same way setupCaptureSession does elsewhere in
	// this file; AddInput/AddOutput throw if the session can't accept them.
	if (captureSession.CanAddInput (deviceInput))
		captureSession.AddInput (deviceInput);

	stillImageOutput = new AVCaptureStillImageOutput ();
	//Or instead of JPEG, we can use one of the following pixel formats: BGRA, 420f output
	stillImageOutput.OutputSettings = new NSDictionary (AVVideo.CodecKey, AVVideo.CodecJPEG);
	if (captureSession.CanAddOutput (stillImageOutput))
		captureSession.AddOutput (stillImageOutput);

	cameraPreviewView.ConfigureCaptureSession (captureSession, stillImageOutput);

	captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
	captureDeviceFormat = captureDevice.ActiveFormat;
	captureSession.CommitConfiguration ();
	captureSession.StartRunning ();

	maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
	PrepareBracketsWithCompletionHandler (completion);
}
/// <summary>
/// Builds the entire barcode-scanning pipeline: picks a camera (front or
/// back per options), chooses a session preset from the supported
/// resolutions, attaches the preview layer and optional overlay, starts the
/// session, wires a ZXing barcode reader to the video data output, and
/// finally configures focus/exposure/white-balance on the device.
/// </summary>
/// <returns>true when the session was set up and started; false when no
/// capture device or device input is available (e.g. on the simulator).</returns>
bool SetupCaptureSession ()
{
	// NOTE(review): 'started' is never read in this method — candidate for removal.
	var started = DateTime.UtcNow;

	var availableResolutions = new List <CameraResolution> ();

	// Map of session presets we are willing to use to their pixel dimensions.
	var consideredResolutions = new Dictionary <NSString, CameraResolution> {
		{ AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
		{ AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } },  //480x360
		{ AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
		{ AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
		{ AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
	};

	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.Preset640x480
	};

	// create a device input and attach it to the session
	// var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	AVCaptureDevice captureDevice = null;

	var devices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);

	// Pick the camera matching options.UseFrontCameraIfAvailable; the last
	// device examined is kept as a fallback if no exact match breaks the loop.
	foreach (var device in devices) {
		captureDevice = device;
		if (options.UseFrontCameraIfAvailable.HasValue &&
		    options.UseFrontCameraIfAvailable.Value &&
		    device.Position == AVCaptureDevicePosition.Front) {
			break; //Front camera successfully set
		} else if (device.Position == AVCaptureDevicePosition.Back &&
		           (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value)) {
			break; //Back camera succesfully set
		}
	}

	if (captureDevice == null) {
		Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
		// Still show the overlay so the host app's UI remains visible.
		if (overlayView != null) {
			this.AddSubview (overlayView);
			this.BringSubviewToFront (overlayView);
		}
		return (false);
	}

	CameraResolution resolution = null;

	// Find resolution
	// Go through the resolutions we can even consider
	foreach (var cr in consideredResolutions) {
		// Now check to make sure our selected device supports the resolution
		// so we can add it to the list to pick from
		if (captureDevice.SupportsAVCaptureSessionPreset (cr.Key)) {
			availableResolutions.Add (cr.Value);
		}
	}

	resolution = options.GetResolution (availableResolutions);

	// See if the user selected a resolution
	if (resolution != null) {
		// Now get the preset string from the resolution chosen
		var preset = (from c in consideredResolutions
		              where c.Value.Width == resolution.Width && c.Value.Height == resolution.Height
		              select c.Key).FirstOrDefault ();

		// If we found a matching preset, let's set it on the session
		if (!string.IsNullOrEmpty (preset)) {
			session.SessionPreset = preset;
		}
	}

	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null) {
		Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
		if (overlayView != null) {
			this.AddSubview (overlayView);
			this.BringSubviewToFront (overlayView);
		}
		return (false);
	} else {
		session.AddInput (input);
	}

	var startedAVPreviewLayerAlloc = DateTime.UtcNow;

	previewLayer = new AVCaptureVideoPreviewLayer (session);

	var totalAVPreviewLayerAlloc = DateTime.UtcNow - startedAVPreviewLayerAlloc;

	Console.WriteLine ("PERF: Alloc AVCaptureVideoPreviewLayer took {0} ms.", totalAVPreviewLayerAlloc.TotalMilliseconds);

	// //Framerate set here (15 fps)
	// if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
	// {
	//     var perf1 = PerformanceCounter.Start ();
	//     NSError lockForConfigErr = null;
	//     captureDevice.LockForConfiguration (out lockForConfigErr);
	//     if (lockForConfigErr == null)
	//     {
	//         captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 10);
	//         captureDevice.UnlockForConfiguration ();
	//     }
	//     PerformanceCounter.Stop (perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
	// }
	// else
	//     previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);

	var perf2 = PerformanceCounter.Start ();

	// Property name differs between the Unified and Classic Xamarin.iOS APIs.
#if __UNIFIED__
	previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
	previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
	previewLayer.Frame = new CGRect (0, 0, this.Frame.Width, this.Frame.Height);
	previewLayer.Position = new CGPoint (this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

	// Host the preview layer in its own auto-resizing view under the overlay.
	layerView = new UIView (new CGRect (0, 0, this.Frame.Width, this.Frame.Height));
	layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
	layerView.Layer.AddSublayer (previewLayer);

	this.AddSubview (layerView);

	ResizePreview (UIApplication.SharedApplication.StatusBarOrientation);

	if (overlayView != null) {
		this.AddSubview (overlayView);
		this.BringSubviewToFront (overlayView);

		//overlayView.LayoutSubviews ();
	}

	PerformanceCounter.Stop (perf2, "PERF: Setting up layers took {0} ms");

	var perf3 = PerformanceCounter.Start ();

	session.StartRunning ();

	PerformanceCounter.Stop (perf3, "PERF: session.StartRunning() took {0} ms");

	var perf4 = PerformanceCounter.Start ();

	var videoSettings = NSDictionary.FromObjectAndKey (new NSNumber ((int)CVPixelFormatType.CV32BGRA),
		CVPixelBuffer.PixelFormatTypeKey);

	// create a VideoDataOutput and add it to the sesion
	output = new AVCaptureVideoDataOutput {
		WeakVideoSettings = videoSettings
	};

	// configure the output
	queue = new DispatchQueue ("ZxingScannerView"); // (Guid.NewGuid().ToString());

	// Luminance-source factory: rotate the captured frame to compensate for
	// interface orientation unless AutoRotate already handles rotation.
	var barcodeReader = new BarcodeReaderiOS (null, (img) => {
		var src = new RGBLuminanceSourceiOS (img); //, bmp.Width, bmp.Height);

		//Don't try and rotate properly if we're autorotating anyway
		if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value) {
			return (src);
		}

		var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait;
		InvokeOnMainThread (() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation);

		switch (tmpInterfaceOrientation) {
		case UIInterfaceOrientation.Portrait:
			return (src.rotateCounterClockwise ().rotateCounterClockwise ().rotateCounterClockwise ());
		case UIInterfaceOrientation.PortraitUpsideDown:
			return (src.rotateCounterClockwise ().rotateCounterClockwise ().rotateCounterClockwise ());
		case UIInterfaceOrientation.LandscapeLeft:
			return (src);
		case UIInterfaceOrientation.LandscapeRight:
			return (src);
		}

		return (src);
	}, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

	// Copy the user's scanning options onto the reader.
	if (ScanningOptions.TryHarder.HasValue) {
		Console.WriteLine ("TRY_HARDER: " + ScanningOptions.TryHarder.Value);
		barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value;
	}
	if (ScanningOptions.PureBarcode.HasValue) {
		barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value;
	}
	if (ScanningOptions.AutoRotate.HasValue) {
		Console.WriteLine ("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value);
		barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value;
	}
	if (!string.IsNullOrEmpty (ScanningOptions.CharacterSet)) {
		barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet;
	}
	if (ScanningOptions.TryInverted.HasValue) {
		barcodeReader.TryInverted = ScanningOptions.TryInverted.Value;
	}

	if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0) {
		barcodeReader.Options.PossibleFormats = new List <BarcodeFormat> ();

		foreach (var pf in ScanningOptions.PossibleFormats) {
			barcodeReader.Options.PossibleFormats.Add (pf);
		}
	}

	// Per-frame callback: decode the frame when analysis is enabled; report a
	// successful decode through resultCallback. Returns true when a barcode
	// was found, false otherwise (including on decode errors).
	outputRecorder = new OutputRecorder (ScanningOptions, img => {
		if (!IsAnalyzing) {
			return (false);
		}

		try {
			//var sw = new System.Diagnostics.Stopwatch();
			//sw.Start();

			var rs = barcodeReader.Decode (img);

			//sw.Stop();
			//Console.WriteLine("Decode Time: {0} ms", sw.ElapsedMilliseconds);

			if (rs != null) {
				resultCallback (rs);
				return (true);
			}
		} catch (Exception ex) {
			Console.WriteLine ("DECODE FAILED: " + ex);
		}

		return (false);
	});

	// Drop frames we can't keep up with rather than queueing them.
	output.AlwaysDiscardsLateVideoFrames = true;
	output.SetSampleBufferDelegate (outputRecorder, queue);

	PerformanceCounter.Stop (perf4, "PERF: SetupCamera Finished. Took {0} ms.");

	session.AddOutput (output);
	//session.StartRunning ();

	var perf5 = PerformanceCounter.Start ();

	// Configure continuous focus/exposure/white balance where supported,
	// falling back to one-shot auto modes; center the points of interest.
	NSError err = null;
	if (captureDevice.LockForConfiguration (out err)) {
		if (captureDevice.IsFocusModeSupported (AVCaptureFocusMode.ContinuousAutoFocus)) {
			captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
		} else if (captureDevice.IsFocusModeSupported (AVCaptureFocusMode.AutoFocus)) {
			captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
		}

		if (captureDevice.IsExposureModeSupported (AVCaptureExposureMode.ContinuousAutoExposure)) {
			captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
		} else if (captureDevice.IsExposureModeSupported (AVCaptureExposureMode.AutoExpose)) {
			captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
		}

		if (captureDevice.IsWhiteBalanceModeSupported (AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance)) {
			captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
		} else if (captureDevice.IsWhiteBalanceModeSupported (AVCaptureWhiteBalanceMode.AutoWhiteBalance)) {
			captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
		}

		// Barcodes are usually held close to the lens; restrict autofocus
		// range to near on iOS 7+ where supported.
		if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0) && captureDevice.AutoFocusRangeRestrictionSupported) {
			captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
		}

		if (captureDevice.FocusPointOfInterestSupported) {
			captureDevice.FocusPointOfInterest = new PointF (0.5f, 0.5f);
		}

		if (captureDevice.ExposurePointOfInterestSupported) {
			captureDevice.ExposurePointOfInterest = new PointF (0.5f, 0.5f);
		}

		captureDevice.UnlockForConfiguration ();
	} else {
		Console.WriteLine ("Failed to Lock for Config: " + err.Description);
	}

	PerformanceCounter.Stop (perf5, "PERF: Setup Focus in {0} ms.");

	return (true);
}
/// <summary>
/// Looks up the system default video capture device of the requested
/// device type at the requested position.
/// </summary>
/// <param name="type">The kind of capture device to look for.</param>
/// <param name="position">The desired camera position (front/back).</param>
/// <param name="device">Receives the matching device, or null when none exists.</param>
/// <returns>true when a matching device was found; otherwise false.</returns>
static bool TryGetDefaultVideoCamera (AVCaptureDeviceType type, AVCaptureDevicePosition position, out AVCaptureDevice device)
{
	return (device = AVCaptureDevice.GetDefaultDevice (type, AVMediaType.Video, position)) != null;
}