/******************************* MAIN FUNCTIONS *******************************/
/// <summary>
/// Sets up a QR-code scanning session: camera input, metadata output with a
/// main-queue delegate, a full-screen preview layer, and a green frame view
/// used to highlight detected codes.
/// </summary>
public override void ViewDidLoad ()
{
	base.ViewDidLoad ();

	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (captureDevice == null) {
		// BUG FIX: no camera (e.g. simulator) - previously FromDevice/AddInput
		// were called with null and crashed.
		return;
	}
	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null)
		return;

	CaptureSession = new AVCaptureSession ();
	// AVCaptureDeviceInput already derives from AVCaptureInput; no cast needed.
	CaptureSession.AddInput (input);

	var captureMetadataOutput = new AVCaptureMetadataOutput ();
	metadataDelegate = new MetadataObjectsDelegate ();
	metadataDelegate.outer = this;
	captureMetadataOutput.SetDelegate (metadataDelegate, DispatchQueue.MainQueue);
	CaptureSession.AddOutput (captureMetadataOutput);
	// MetadataObjectTypes must be assigned AFTER the output joins the session.
	captureMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

	VideoPreviewLayer = new AVCaptureVideoPreviewLayer (CaptureSession);
	VideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	VideoPreviewLayer.Frame = View.Layer.Bounds;
	View.Layer.AddSublayer (VideoPreviewLayer);
	View.BringSubviewToFront (messageLabel);

	// Green border drawn around any detected QR code.
	QRCodeFrameView = new UIView ();
	QRCodeFrameView.Layer.BorderColor = UIColor.Green.CGColor;
	QRCodeFrameView.Layer.BorderWidth = 2;
	View.AddSubview (QRCodeFrameView);
	View.BringSubviewToFront (QRCodeFrameView);

	CaptureSession.StartRunning ();

	cancelButton.Clicked += (sender, e) => {
		this.DismissViewController (true, null);
	};
}
/// <summary>
/// Configures an AVCaptureSession that streams 720p/32BGRA frames (30 fps cap)
/// to <c>captureVideoDelegate</c> and starts it running.
/// </summary>
/// <param name="ViewController">Controller handed to the capture delegate.</param>
/// <param name="VideoPath">Destination path (not consumed by this method; presumably used by the delegate - TODO confirm).</param>
public void RecordVideoToPath (UIViewController ViewController, string VideoPath)
{
	// setup capture device
	AVCaptureDevice videoRecordingDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	NSError error;
	AVCaptureDeviceInput videoInput = new AVCaptureDeviceInput (videoRecordingDevice, out error);
	if (error != null) {
		// BUG FIX: the error out-parameter was previously ignored.
		Console.WriteLine ("Could not create video input: {0}", error.LocalizedDescription);
		return;
	}

	// create and assign a capture session
	AVCaptureSession captureSession = new AVCaptureSession ();
	captureSession.SessionPreset = AVCaptureSession.Preset1280x720;
	captureSession.AddInput (videoInput);

	// Create capture device output
	AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput ();
	captureSession.AddOutput (videoOutput);
	videoOutput.VideoSettings.PixelFormat = CVPixelFormatType.CV32BGRA;
	videoOutput.MinFrameDuration = new CMTime (1, 30);

	// BUG FIX: the delegate must exist before being handed to the output;
	// previously SetSampleBufferDelegatequeue() ran with a null delegate
	// because the delegate was constructed afterwards.
	captureVideoDelegate = new CaptureVideoDelegate (ViewController);
	videoOutput.SetSampleBufferDelegatequeue (captureVideoDelegate, System.IntPtr.Zero);

	// Start capture session
	captureSession.StartRunning ();
}
/// <summary>
/// Handles a detected knock. If it arrives inside the 1-5 second countdown
/// window and nothing has been published yet: tears down the capture pipeline,
/// stops both timers, stops recording, schedules the audio reset (10s) and the
/// Twitter post (1s), and updates the UI.
/// </summary>
void UserKnocked ()
{
	_state.didKnock = true;
	if (_state.CountDown > 0 && _state.CountDown < 5 && _state.PublishToTwitter == false) {
		// Latch immediately so a second knock cannot re-enter this branch.
		// (A redundant second assignment later in the method was removed.)
		_state.PublishToTwitter = true;

		// Tear down the capture session and its collaborators.
		session.StopRunning ();
		session.Dispose ();
		session = null;
		outputRecorder.Dispose ();
		outputRecorder = null;
		queue.Dispose ();
		queue = null;

		//Stop Both Timers
		AudioLoop.Invalidate ();
		timer.Invalidate ();

		//Stop Recording
		StopRecording ();

		//Start up Resording
		NSTimer.CreateScheduledTimer (TimeSpan.FromSeconds (10), AudioReset);
		NSTimer.CreateScheduledTimer (TimeSpan.FromSeconds (1), PostToTwitter);

		_state.TopLabel.Text = "";
		_state.BottomLabel.Text = "Posted to Twitter!";
		imgYouRock.Hidden = false;
	}
}
/// <summary>
/// Lazily creates the shared capture session, registers for all session
/// notifications and the app foreground notification, attaches the default
/// video device, and adds a metadata output reporting on a private queue.
/// </summary>
void setupCaptureSession ()
{
	if (CaptureSession != null)
		return;

	CaptureSession = new AVCaptureSession ();

	// name == null observes every notification posted by this session object.
	NSNotificationCenter.DefaultCenter.AddObserver (null, captureSessionNotification, CaptureSession);

	applicationWillEnterForegroundNotificationObserver =
		NSNotificationCenter.DefaultCenter.AddObserver (
			UIApplication.WillEnterForegroundNotification.ToString (),
			UIApplication.SharedApplication,
			NSOperationQueue.CurrentQueue,
			delegate (NSNotification notification) {
				applicationWillEnterForeground ();
			});

	videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	NSError error;
	videoInput = new AVCaptureDeviceInput (videoDevice, out error);
	// BUG FIX: surface input-creation failures instead of silently ignoring
	// the NSError out-parameter.
	if (error != null)
		Console.WriteLine ("Could not create video input: {0}", error.LocalizedDescription);
	if (CaptureSession.CanAddInput (videoInput))
		CaptureSession.AddInput (videoInput);

	metadataOutput = new AVCaptureMetadataOutput ();
	var metadataQueue = new DispatchQueue ("com.AVCam.metadata");
	metadataObjectsDelegate = new MetadataObjectsDelegate {
		DidOutputMetadataObjectsAction = DidOutputMetadataObjects
	};
	metadataOutput.SetDelegate (metadataObjectsDelegate, metadataQueue);
	if (CaptureSession.CanAddOutput (metadataOutput))
		CaptureSession.AddOutput (metadataOutput);
}
/// <summary>
/// Builds the live preview: a preview layer over <c>liveCameraStream</c>, the
/// default video camera as input, and a JPEG still-image output.
/// </summary>
public void SetupLiveCameraStream ()
{
	captureSession = new AVCaptureSession ();

	var viewLayer = liveCameraStream.Layer;
	Console.WriteLine (viewLayer.Frame.Width);
	var videoPreviewLayer = new AVCaptureVideoPreviewLayer (captureSession) {
		Frame = liveCameraStream.Bounds
	};
	liveCameraStream.Layer.AddSublayer (videoPreviewLayer);
	Console.WriteLine (liveCameraStream.Layer.Frame.Width);

	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	ConfigureCameraForDevice (captureDevice);
	captureDeviceInput = AVCaptureDeviceInput.FromDevice (captureDevice);

	// BUG FIX: the JPEG codec dictionary was built but never used - the output
	// was previously given an empty NSDictionary, losing the codec setting.
	var dictionary = new NSMutableDictionary ();
	dictionary [AVVideo.CodecKey] = new NSNumber ((int)AVVideoCodec.JPEG);
	stillImageOutput = new AVCaptureStillImageOutput () {
		OutputSettings = dictionary
	};

	captureSession.AddOutput (stillImageOutput);
	captureSession.AddInput (captureDeviceInput);
	captureSession.StartRunning ();
	ViewWillLayoutSubviews ();
}
/// <summary>
/// Configures a medium-resolution capture session that feeds 32BGRA frames to
/// a <c>QrScanner</c> delegate and shows a portrait preview layer.
/// </summary>
/// <returns><c>true</c> when the session was started; <c>false</c> when no camera input is available.</returns>
bool SetupCaptureSession ()
{
	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	// create a device input and attach it to the session
	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (captureDevice == null) {
		// BUG FIX: guard against camera-less hosts (simulator) instead of
		// passing null into FromDevice.
		return false;
	}
	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null) {
		// No input device
		return false;
	}
	session.AddInput (input);

	// create a VideoDataOutput and add it to the sesion
	var output = new AVCaptureVideoDataOutput () {
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
	};

	// configure the output
	queue = new DispatchQueue ("myQueue");
	qrScanner = new QrScanner (this);
	output.SetSampleBufferDelegateAndQueue (qrScanner, queue);
	session.AddOutput (output);

	previewLayer = new AVCaptureVideoPreviewLayer (session);
	previewLayer.Orientation = AVCaptureVideoOrientation.Portrait;
	previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill";

	session.StartRunning ();
	return true;
}
/// <summary>
/// Creates a high-preset capture session, attaches it to the preview view,
/// enables AVFoundation face detection, and observes the zoom-related KVO
/// key paths on the current device before starting the session.
/// </summary>
void setupAVCapture ()
{
	session = new AVCaptureSession ();
	session.SessionPreset = AVCaptureSession.PresetHigh;
	previewView.Session = session;

	updateCameraSelection ();

	CALayer rootLayer = previewView.Layer;
	rootLayer.MasksToBounds = true;
	// HACK: Remove .ToString() for AVLayerVideoGravity
	// (previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
	(previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	previewView.Layer.BackgroundColor = UIColor.Black.CGColor;

	setupAVFoundationFaceDetection ();

	if (device != null) {
		// Watch zoom factor and ramping state so observers can react to zoom changes.
		device.AddObserver (this, (NSString)"videoZoomFactor", (NSKeyValueObservingOptions)0, VideoZoomFactorContext);
		device.AddObserver (this, (NSString)"rampingVideoZoom", (NSKeyValueObservingOptions)0, VideoZoomRampingContext);
	}

	session.StartRunning ();
}
/// <summary>
/// Configures a medium-resolution session delivering 32BGRA frames (capped at
/// 15 fps) to an <c>OutputRecorder</c> on a private dispatch queue.
/// </summary>
/// <returns><c>true</c> when the session was started; <c>false</c> when no camera input is available.</returns>
bool SetupCaptureSession ()
{
	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	// create a device input and attach it to the session
	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (captureDevice == null) {
		// BUG FIX: previously FromDevice was called with a null device on
		// camera-less hosts (simulator); bail out early instead.
		Console.WriteLine ("No capture device");
		return false;
	}
	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null) {
		Console.WriteLine ("No input device");
		return false;
	}
	session.AddInput (input);

	// create a VideoDataOutput and add it to the sesion
	var output = new AVCaptureVideoDataOutput () {
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
		// If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
		MinFrameDuration = new CMTime (1, 15)
	};

	// configure the output
	queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
	outputRecorder = new OutputRecorder ();
	output.SetSampleBufferDelegateAndQueue (outputRecorder, queue);
	session.AddOutput (output);

	session.StartRunning ();
	return true;
}
/// <summary>
/// Builds the app-wide capture pipeline at launch: medium-preset session, a
/// 15 fps camera input, a 32BGRA video-data output feeding an
/// <c>OutputRecorder</c>, and a JPEG still-image output for bracketed capture.
/// Sets <c>CameraAvailable</c> so tabs know whether a camera exists.
/// </summary>
public override void FinishedLaunching (UIApplication application)
{
	// Create a new capture session
	Session = new AVCaptureSession ();
	Session.SessionPreset = AVCaptureSession.PresetMedium;

	// Create a device input
	CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (CaptureDevice == null)
		throw new Exception ("Video recording not supported on this device");

	// Prepare device for configuration
	if (!CaptureDevice.LockForConfiguration (out Error)) {
		// There has been an issue, abort
		Console.WriteLine ("Error: {0}", Error.LocalizedDescription);
		CaptureDevice.UnlockForConfiguration ();
		return;
	}

	// Configure stream for 15 frames per second (fps)
	CaptureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);

	// Unlock configuration
	CaptureDevice.UnlockForConfiguration ();

	// Get input from capture device
	Input = AVCaptureDeviceInput.FromDevice (CaptureDevice);
	if (Input == null) {
		// Error, report and abort
		Console.WriteLine ("Unable to gain input from capture device.");
		CameraAvailable = false;
		return;
	}

	// Attach input to session
	Session.AddInput (Input);

	// Create a new output
	var output = new AVCaptureVideoDataOutput ();
	var settings = new AVVideoSettingsUncompressed ();
	settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
	output.WeakVideoSettings = settings.Dictionary;

	// Configure and attach to the output to the session
	Queue = new DispatchQueue ("ManCamQueue");
	Recorder = new OutputRecorder ();
	output.SetSampleBufferDelegate (Recorder, Queue);
	Session.AddOutput (output);

	// Configure and attach a still image output for bracketed capture
	StillImageOutput = new AVCaptureStillImageOutput ();
	var dict = new NSMutableDictionary ();
	dict [AVVideo.CodecKey] = new NSNumber ((int)AVVideoCodec.JPEG);
	// BUG FIX: the JPEG settings dictionary was built but never applied to the output.
	StillImageOutput.OutputSettings = dict;
	Session.AddOutput (StillImageOutput);

	// Let tabs know that a camera is available
	CameraAvailable = true;
}
/// <summary>
/// Configures a medium-preset session with the default camera locked to
/// 15 fps, delivering 32BGRA frames to an <c>OutputRecorder</c>.
/// </summary>
/// <returns><c>true</c> on success; <c>false</c> when no camera/input exists or device configuration fails.</returns>
bool SetupCaptureSession ()
{
	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	// create a device input and attach it to the session
	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (captureDevice == null) {
		Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
		return false;
	}

	//Configure for 15 FPS. Note use of LockForConigfuration()/UnlockForConfiguration()
	NSError error = null;
	// BUG FIX: check the boolean result of LockForConfiguration rather than
	// the error object - the error is only meaningful when the call fails.
	if (!captureDevice.LockForConfiguration (out error)) {
		Console.WriteLine (error);
		return false;
	}
	captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);
	captureDevice.UnlockForConfiguration ();

	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null) {
		Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
		return false;
	}
	session.AddInput (input);

	// create a VideoDataOutput and add it to the sesion
	var output = new AVCaptureVideoDataOutput () {
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
	};

	// configure the output
	queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
	outputRecorder = new OutputRecorder ();
	output.SetSampleBufferDelegate (outputRecorder, queue);
	session.AddOutput (output);

	session.StartRunning ();
	return true;
}
/// <summary>
/// Sets up a medium-preset capture session that feeds uncompressed 32BGRA
/// frames to an <c>OutputRecorder</c> rendering into <c>ImageView</c>. When no
/// camera is present, draws an error message into the image view instead.
/// </summary>
/// <returns><c>true</c> when the session was started.</returns>
bool SetupCaptureSession ()
{
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	var camera = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (camera == null) {
		// No camera: render a placeholder image announcing the problem.
		Image<Bgr, Byte> placeholder = new Image<Bgr, byte> (512, 512, new Bgr (255, 255, 255));
		CvInvoke.PutText (
			placeholder,
			"Capture device not found.",
			new Point (10, 200),
			FontFace.HersheyComplex,
			1,
			new MCvScalar (),
			2);
		ImageView.Image = placeholder.ToUIImage ();
		return false;
	}

	var cameraInput = AVCaptureDeviceInput.FromDevice (camera);
	if (cameraInput == null) {
		Console.WriteLine ("No input device");
		return false;
	}
	session.AddInput (cameraInput);

	// Ask for uncompressed 32BGRA sample buffers.
	AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed ();
	settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
	var output = new AVCaptureVideoDataOutput () {
		UncompressedVideoSetting = settingUncomp,
		// If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
		//MinFrameDuration = new CMTime (1, 15)
	};

	queue = new DispatchQueue ("myQueue");
	outputRecorder = new OutputRecorder (ImageView);
	output.SetSampleBufferDelegateQueue (outputRecorder, queue);
	session.AddOutput (output);

	session.StartRunning ();
	return true;
}
/// <summary>
/// Installs a fresh preview layer for the given session (replacing any layer
/// left over from a previous configuration) and starts observing the still
/// image output's capture-in-progress key path.
/// </summary>
/// <param name="captureSession">Session to preview.</param>
/// <param name="captureOutput">Still-image output to observe.</param>
public void ConfigureCaptureSession (AVCaptureSession captureSession, AVCaptureStillImageOutput captureOutput)
{
	// Replace any previously installed preview layer.
	if (previewLayer != null) {
		previewLayer.RemoveFromSuperLayer ();
		previewLayer = null;
	}

	previewLayer = new AVCaptureVideoPreviewLayer (captureSession) {
		VideoGravity = AVPlayerLayer.GravityResizeAspect,
		Frame = Bounds
	};
	Layer.AddSublayer (previewLayer);

	CaptureOutput = captureOutput;
	// KVO registration; presumably drives UI feedback while a still is captured - TODO confirm.
	CaptureOutput.AddObserver (this, capturingStillImageKeypath, NSKeyValueObservingOptions.New, IntPtr.Zero);
}
/// <summary>
/// Starts a medium-preset camera session and shows its preview inside
/// <c>cameraView</c>, orienting the preview to match the status bar.
/// </summary>
public void InitAndStartCamera ()
{
	session = new AVCaptureSession {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	var camera = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	NSError error;
	var cameraInput = AVCaptureDeviceInput.FromDevice (camera, out error);
	if (cameraInput == null || !session.CanAddInput (cameraInput))
		return;
	session.AddInput (cameraInput);

	previewLayer = new AVCaptureVideoPreviewLayer (session) {
		Frame = rootView.Bounds
	};
	// Map the current status-bar orientation to the matching video orientation.
	previewLayer.Connection.VideoOrientation =
		configDicByRotationChanged [UIApplication.SharedApplication.StatusBarOrientation];
	previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	cameraView.Layer.AddSublayer (previewLayer);

	session.StartRunning ();
}
/// <summary>
/// On window load, starts a medium-preset capture session and fills the
/// window's content view with its preview layer.
/// </summary>
public override void WindowDidLoad ()
{
	base.WindowDidLoad ();

	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};

	var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	var input = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (input == null) {
		Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
		// BUG FIX: previously execution fell through and called AddInput(null).
		return;
	}
	session.AddInput (input);

	var captureVideoPreviewLayer = new AVCaptureVideoPreviewLayer (session);
	var contentView = Window.ContentView;
	contentView.WantsLayer = true;
	captureVideoPreviewLayer.Frame = contentView.Bounds;
	contentView.Layer.AddSublayer (captureVideoPreviewLayer);

	session.StartRunning ();
}
/// <summary>
/// Initializes the preview layer and capture session using the camera that
/// matches <c>cameraOptions</c> (front/back) and starts previewing.
/// </summary>
void Initialize ()
{
	CaptureSession = new AVCaptureSession ();
	previewLayer = new AVCaptureVideoPreviewLayer (CaptureSession) {
		Frame = Bounds,
		VideoGravity = AVLayerVideoGravity.ResizeAspectFill
	};

	var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
	var cameraPosition = (cameraOptions == CameraOptions.Front)
		? AVCaptureDevicePosition.Front
		: AVCaptureDevicePosition.Back;
	var device = videoDevices.FirstOrDefault (d => d.Position == cameraPosition);
	if (device == null) {
		return;
	}

	NSError error;
	var input = new AVCaptureDeviceInput (device, out error);
	// BUG FIX: previously neither the error nor the input was checked before AddInput.
	if (error != null || input == null) {
		return;
	}
	CaptureSession.AddInput (input);

	Layer.AddSublayer (previewLayer);
	CaptureSession.StartRunning ();
	IsPreviewing = true;
}
/// <summary>
/// Sets up a full-screen scanner for QR and EAN-13 codes; when a code is
/// found the session stops and an alert shows the decoded string value.
/// </summary>
public override void ViewDidLoad ()
{
	base.ViewDidLoad ();

	session = new AVCaptureSession ();
	var camera = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
	if (camera == null) {
		// BUG FIX: guard against camera-less hosts (simulator) instead of
		// passing null through FromDevice/AddInput.
		return;
	}
	var input = AVCaptureDeviceInput.FromDevice (camera);
	if (input == null)
		return;
	session.AddInput (input);

	output = new AVCaptureMetadataOutput ();
	var metadataDelegate = new MetadataOutputDelegate ();
	output.SetDelegate (metadataDelegate, DispatchQueue.MainQueue);
	session.AddOutput (output);
	// Types must be assigned after the output joins the session.
	output.MetadataObjectTypes = new NSString[] {
		AVMetadataObject.TypeQRCode,
		AVMetadataObject.TypeEAN13Code
	};

	var previewLayer = new AVCaptureVideoPreviewLayer (session);
	//var view = new ContentView(UIColor.LightGray, previewLayer, metadataDelegate);
	previewLayer.MasksToBounds = true;
	previewLayer.VideoGravity = AVCaptureVideoPreviewLayer.GravityResizeAspectFill;
	previewLayer.Frame = UIScreen.MainScreen.Bounds;
	this.View.Layer.AddSublayer (previewLayer);

	metadataDelegate.MetadataFound += (s, e) => {
		session.StopRunning ();
		new UIAlertView ("Scanned!", e.StringValue, null, "OK", null).Show ();
	};

	session.StartRunning ();
}
// Configures the full ZXing barcode-scanning pipeline: a 640x480 session with
// the default camera, a preview layer (frame-rate capped via the connection
// when available) hosted in an autoresizing layerView, an optional overlay
// view kept frontmost, a BarcodeReader configured from ScanningOptions
// (rotation handling, TryHarder, PureBarcode, AutoRotate, CharacterSet,
// TryInverted, PossibleFormats), an OutputRecorder that decodes frames and
// invokes resultCallback, and finally continuous autofocus on the device.
// Returns true when setup completed.
// NOTE(review): the code below is kept byte-identical; its statement ordering
// (session started before the output is attached) is deliberate-looking and
// too order-sensitive to restyle safely.
bool SetupCaptureSession() { // configure the capture session for low resolution, change this if your code // can cope with more data or volume session = new AVCaptureSession() { SessionPreset = AVCaptureSession.Preset640x480 }; // create a device input and attach it to the session var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (captureDevice == null) { Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } var input = AVCaptureDeviceInput.FromDevice(captureDevice); if (input == null) { Console.WriteLine("No input - this won't work on the simulator, try a physical device"); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); } return(false); } else { session.AddInput(input); } previewLayer = new AVCaptureVideoPreviewLayer(session); //Framerate set here (15 fps) if (previewLayer.RespondsToSelector(new Selector("connection"))) { previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10); } previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill; previewLayer.Frame = this.Frame; previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2)); layerView = new UIView(this.Frame); layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight; layerView.Layer.AddSublayer(previewLayer); this.AddSubview(layerView); ResizePreview(UIApplication.SharedApplication.StatusBarOrientation); if (overlayView != null) { this.AddSubview(overlayView); this.BringSubviewToFront(overlayView); //overlayView.LayoutSubviews (); } session.StartRunning(); Console.WriteLine("RUNNING!!!"); // create a VideoDataOutput and add it to the sesion output = new AVCaptureVideoDataOutput() { //videoSettings VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA), }; // 
configure the output queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString()); var barcodeReader = new BarcodeReader(null, (img) => { var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height); //Don't try and rotate properly if we're autorotating anyway if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value) { return(src); } var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait; InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation); switch (tmpInterfaceOrientation) { case UIInterfaceOrientation.Portrait: return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise()); case UIInterfaceOrientation.PortraitUpsideDown: return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise()); case UIInterfaceOrientation.LandscapeLeft: return(src); case UIInterfaceOrientation.LandscapeRight: return(src); } return(src); }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown)); if (ScanningOptions.TryHarder.HasValue) { Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value); barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value; } if (ScanningOptions.PureBarcode.HasValue) { barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value; } if (ScanningOptions.AutoRotate.HasValue) { Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value); barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value; } if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet)) { barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet; } if (ScanningOptions.TryInverted.HasValue) { barcodeReader.TryInverted = ScanningOptions.TryInverted.Value; } if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0) { barcodeReader.Options.PossibleFormats = new List <BarcodeFormat>(); foreach (var pf in 
ScanningOptions.PossibleFormats) { barcodeReader.Options.PossibleFormats.Add(pf); } } outputRecorder = new OutputRecorder(ScanningOptions, img => { if (!IsAnalyzing) { return; } try { var started = DateTime.Now; var rs = barcodeReader.Decode(img); var total = DateTime.Now - started; Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms"); if (rs != null) { resultCallback(rs); } } catch (Exception ex) { Console.WriteLine("DECODE FAILED: " + ex); } }); output.AlwaysDiscardsLateVideoFrames = true; output.SetSampleBufferDelegate(outputRecorder, queue); Console.WriteLine("SetupCamera Finished"); session.AddOutput(output); //session.StartRunning (); if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus)) { NSError err = null; if (captureDevice.LockForConfiguration(out err)) { captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus; if (captureDevice.FocusPointOfInterestSupported) { captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f); } captureDevice.UnlockForConfiguration(); } else { Console.WriteLine("Failed to Lock for Config: " + err.Description); } } return(true); }
/// <summary>
/// Builds the barcode-scanner view: back camera at 720p, a fill-gravity
/// preview layer, and a 32BGRA video-data output whose delegate raises
/// <c>OnDetected</c> on the main thread. Optionally turns the torch on once
/// the session is running.
/// </summary>
/// <param name="defaultTorchOn">Turn the torch on after the session starts.</param>
/// <param name="vibrationOnDetected">Passed to the capture delegate; vibrate on detection.</param>
/// <param name="startScanningOnCreate">Initial value for <c>Configuration.IsScanning</c>.</param>
/// <param name="scanInterval">Passed to the capture delegate; detection interval.</param>
void Initialize (bool defaultTorchOn, bool vibrationOnDetected, bool startScanningOnCreate, int scanInterval)
{
	Configuration.IsScanning = startScanningOnCreate;
	CaptureSession = new AVCaptureSession ();
	CaptureSession.BeginConfiguration ();
	this.AutoresizingMask = UIViewAutoresizing.FlexibleDimensions;
	previewLayer = new AVCaptureVideoPreviewLayer (CaptureSession) {
		Frame = this.Bounds,
		VideoGravity = AVLayerVideoGravity.ResizeAspectFill
	};

	var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
	var cameraPosition = AVCaptureDevicePosition.Back;
	//var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
	var device = videoDevices.FirstOrDefault (d => d.Position == cameraPosition);
	if (device == null) {
		return;
	}

	NSError error;
	var input = new AVCaptureDeviceInput (device, out error);
	// BUG FIX: bail out when the device input could not be created instead of
	// adding a broken input to the session (error was previously ignored).
	if (error != null || input == null) {
		return;
	}
	CaptureSession.AddInput (input);
	CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset1280x720;
	Layer.AddSublayer (previewLayer);
	CaptureSession.CommitConfiguration ();

	// NOTE(review): the output is attached after CommitConfiguration; this
	// works but triggers a second implicit reconfiguration.
	VideoDataOutput = new AVCaptureVideoDataOutput {
		AlwaysDiscardsLateVideoFrames = true,
		WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary
	};

	captureVideoDelegate = new CaptureVideoDelegate (vibrationOnDetected, scanInterval);
	captureVideoDelegate.OnDetected += (list) => {
		InvokeOnMainThread (() => {
			//CaptureSession.StopRunning();
			this.OnDetected?.Invoke (list);
		});
	};
	VideoDataOutput.SetSampleBufferDelegateQueue (captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);
	CaptureSession.AddOutput (VideoDataOutput);

	InvokeOnMainThread (() => {
		CaptureSession.StartRunning ();
		//Torch on by default
		if (defaultTorchOn && !GoogleVisionBarCodeScanner.Methods.IsTorchOn ()) {
			GoogleVisionBarCodeScanner.Methods.ToggleFlashlight ();
		}
	});
}
/// <summary>
/// Configures a 720p capture session whose 24RGB frames are delivered to this
/// view controller on a dedicated queue, and installs a full-view preview
/// layer (as the view's backing layer when none exists, otherwise as a sublayer).
/// </summary>
/// <returns><c>true</c> when the session was started.</returns>
public bool SetupCapture ()
{
	// configure the capture session for low resolution, change this if your code
	// can cope with more data or volume
	captureSession = new AVCaptureSession () {
		//SessionPreset = AVCaptureSession.PresetPhoto
		SessionPreset = AVCaptureSession.Preset1280x720
	};

	// Hook the default camera up as the session input.
	var captureDevice = AVCaptureDevice.GetDefaultDevice (AVMediaType.Video);
	captureDeviceInput = AVCaptureDeviceInput.FromDevice (captureDevice);
	if (captureDeviceInput == null) {
		Console.WriteLine ("No video input device");
		return (false);
	}
	if (captureSession.CanAddInput (captureDeviceInput)) {
		captureSession.AddInput (captureDeviceInput);
	} else {
		Console.WriteLine ("Could not add input capture device to AVACaptureSession");
		return (false);
	}

	// Frames are requested as 24-bit RGB buffers.
	AVCaptureVideoDataOutput output = new AVCaptureVideoDataOutput {
		AlwaysDiscardsLateVideoFrames = false, // true,
		WeakVideoSettings = new CVPixelBufferAttributes () { PixelFormatType = CVPixelFormatType.CV24RGB }.Dictionary //,
		// If you want to cap the frame rate at a given speed, in this sample: 30 frames per second
		//MinFrameDuration = new CMTime(1, 30)
	};
	CoreFoundation.DispatchQueue videoCaptureQueue = new CoreFoundation.DispatchQueue ("Video Capture Queue");
	output.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
	if (captureSession.CanAddOutput (output)) {
		captureSession.AddOutput (output);
	} else {
		return (false);
	}

	// add preview layer to this view controller's NSView
	AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer (captureSession);
	previewLayer.Frame = this.View.Bounds;
	previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	if (this.View.Layer == null) {
		this.View.WantsLayer = true;
		this.View.Layer = previewLayer;
	} else {
		this.View.WantsLayer = true;
		this.View.Layer.AddSublayer (previewLayer);
	}

	captureSession.StartRunning ();
	return (true);
}
// Initializes the video-recording pipeline: picks the camera matching
// CameraOption (falling back to the default device), adds camera and
// microphone inputs inside a best-effort try/catch (any failure aborts
// silently - intentional, since the simulator always fails here), installs a
// preview layer oriented per OrientationOption, and configures a movie-file
// output with a max duration and a minimum free-disk-space limit. The session
// preset is set to High and the session is NOT started here (StartPreviewing
// does that).
// NOTE(review): kept byte-identical; the input/teardown ordering and the
// deliberate silent catch make a restyle risky.
private void InitCamera() { //ADD DEVICE INPUTS try { //If no camera avaiable, return if (!IsCameraAvailable) { return; } //Set up a new AV capture session session = new AVCaptureSession(); //Set up a new session //add video capture device var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); AVCaptureDevicePosition cameraPosition = (CameraOption == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back; var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition); //Get the first device where the camera matches the requested camera if (device == null) { //use the default camera if front isn't available device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); } if (device == null) { return; //No device available } input = AVCaptureDeviceInput.FromDevice(device); session.AddInput(input); //add audio capture device audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio); audioInput = AVCaptureDeviceInput.FromDevice(audioDevice); session.AddInput(audioInput); } catch (Exception ex) { return; } //Set up preview layer (shows what the input device sees) previewlayer = new AVCaptureVideoPreviewLayer(session); previewlayer.Frame = Bounds; if (OrientationOption == OrientationOptions.Landscape) { //landscape previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.LandscapeRight; //Video is recoreded upside down but oriented correctly for right handed people //previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait; //VIdeo recorded portrait, face to left //previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.LandscapeLeft; } else { //portrait previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait; } output = new AVCaptureMovieFileOutput(); long totalSeconds = 10000; Int32 preferredTimeScale = 30; CMTime maxDuration = new CMTime(totalSeconds, preferredTimeScale); output.MinFreeDiskSpaceLimit = 1024 * 
1024; output.MaxRecordedDuration = maxDuration; if (session.CanAddOutput(output)) { session.AddOutput(output); } //Resolutions available @ http://stackoverflow.com/questions/19422322/method-to-find-devices-camera-resolution-ios session.SessionPreset = AVCaptureSession.PresetHigh; //Widescreen (Medium is 4:3) Layer.AddSublayer(previewlayer); //session.StartRunning(); //Moved this to StartPreviewing }
/// <summary>
/// (Re)builds the photo capture pipeline: finds the front and back cameras,
/// forces their flashes off, attaches the rear camera as input, installs a
/// preview layer behind all other sublayers, adds a JPEG still-image output,
/// and starts the session.
/// </summary>
private void SetupCamera ()
{
	CaptureSession = null;
	CaptureSession = new AVCaptureSession ();
	CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;

	currentDevice = null;
	inputDevice1 = null;
	inputDevice2 = null;

	foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video)) {
		if (device.Position == AVCaptureDevicePosition.Front) {
			inputDevice1 = device;
		} else if (device.Position == AVCaptureDevicePosition.Back) {
			inputDevice2 = device;
		}
	}

	NSError error;
	// BUG FIX: null-guard each device (a host may lack a front or back camera)
	// and balance every LockForConfiguration with UnlockForConfiguration -
	// previously the configuration lock was never released.
	if (inputDevice1 != null && inputDevice1.HasFlash) {
		inputDevice1.LockForConfiguration (out error);
		inputDevice1.FlashMode = AVCaptureFlashMode.Off;
		inputDevice1.UnlockForConfiguration ();
		FlashButton.TitleLabel.Text = "Flash Off";
	}
	if (inputDevice2 != null && inputDevice2.HasFlash) {
		inputDevice2.LockForConfiguration (out error);
		inputDevice2.FlashMode = AVCaptureFlashMode.Off;
		inputDevice2.UnlockForConfiguration ();
		FlashButton.TitleLabel.Text = "Flash Off";
	}

	frontCamera = inputDevice1 != null ? AVCaptureDeviceInput.FromDevice (inputDevice1, out error) : null;
	rearCamera = inputDevice2 != null ? AVCaptureDeviceInput.FromDevice (inputDevice2, out error) : null;
	currentDevice = inputDevice2;

	if (rearCamera != null && CaptureSession.CanAddInput (rearCamera)) {
		CaptureSession.AddInput (rearCamera);
	}

	AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer (CaptureSession);
	previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
	previewLayer.Frame = View.Frame;
	View.Layer.InsertSublayer (previewLayer, 0);

	StillImageOutput = new AVCaptureStillImageOutput ();
	StillImageOutput.OutputSettings = new NSDictionary (AVVideo.CodecKey, AVVideo.CodecJPEG);
	CaptureSession.AddOutput (StillImageOutput);
	CaptureSession.StartRunning ();
}
/// <summary>Creates the capture session with the medium-quality preset.</summary>
void CreateSession ()
{
	session = new AVCaptureSession () {
		SessionPreset = AVCaptureSession.PresetMedium
	};
}
/// <summary>
/// Creates the capture session at the requested resolution and conditionally
/// wires up camera input, microphone input, image sampling, and movie-file
/// recording based on the capture flags configured on this instance.
/// </summary>
/// <param name="errorMessage">Receives a description of the first failure, or "" on success.</param>
/// <returns><c>true</c> when every requested component was configured.</returns>
private bool setupCaptureSessionInternal (out string errorMessage)
{
	errorMessage = "";

	// create the capture session
	session = new AVCaptureSession ();
	switch (resolution) {
	case Resolution.Low:
		session.SessionPreset = AVCaptureSession.PresetLow;
		break;
	case Resolution.High:
		session.SessionPreset = AVCaptureSession.PresetHigh;
		break;
	case Resolution.Medium:
	default:
		session.SessionPreset = AVCaptureSession.PresetMedium;
		break;
	}

	// conditionally configure the camera input
	if (captureVideo || captureImages) {
		if (addCameraInput (out errorMessage) == false) {
			return false;
		}
	}

	// conditionally configure the microphone input
	if (captureAudio) {
		if (addAudioInput (out errorMessage) == false) {
			return false;
		}
	}

	// conditionally configure the sample buffer output
	if (captureImages) {
		// Sample less often (1000 ms vs 100 ms) when video is also being captured.
		int minimumSampleIntervalInMilliSeconds = captureVideo ? 1000 : 100;
		if (addImageSamplerOutput (out errorMessage, minimumSampleIntervalInMilliSeconds) == false) {
			return false;
		}
	}

	// conditionally configure the movie file output
	if (shouldRecord) {
		if (addMovieFileOutput (out errorMessage) == false) {
			return false;
		}
	}

	return true;
}
/// <summary>
/// Configures a movie-recording pipeline: camera + microphone inputs, a
/// camera preview hosted behind the Interface Builder controls, and a
/// duration/disk-limited AVCaptureMovieFileOutput; then starts the session.
/// Shows the error label instead when no capture devices exist (simulator).
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();

    weAreRecording = false;
    lblError.Hidden = true;
    btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

    // Set up session
    session = new AVCaptureSession();

    // Set up inputs and add them to the session.
    // This will only work on a physical device!
    Console.WriteLine("getting device inputs");
    try
    {
        // add video capture device
        device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
        input = AVCaptureDeviceInput.FromDevice(device);
        session.AddInput(input);

        // add audio capture device
        audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
        audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
        session.AddInput(audioInput);
    }
    catch (Exception)
    {
        // Show the error label. This always happens when running in the
        // simulator instead of a physical device (no capture hardware).
        lblError.Hidden = false;
        return;
    }

    // Set up preview layer (shows what the input device sees)
    Console.WriteLine("setting up preview layer");
    previewlayer = new AVCaptureVideoPreviewLayer(session);
    previewlayer.Frame = this.View.Bounds;

    // Host the preview in a view pushed to the back so controls placed in
    // Interface Builder sit on top of the camera preview.
    // FIX: the original allocated cameraView twice in a row; one is enough.
    UIView cameraView = new UIView();
    cameraView.Layer.AddSublayer(previewlayer);
    this.View.AddSubview(cameraView);
    this.View.SendSubviewToBack(cameraView);

    Console.WriteLine("Configuring output");
    output = new AVCaptureMovieFileOutput();
    long totalSeconds = 10000;
    Int32 preferredTimeScale = 30;
    CMTime maxDuration = new CMTime(totalSeconds, preferredTimeScale);
    output.MinFreeDiskSpaceLimit = 1024 * 1024; // stop before the disk fills
    output.MaxRecordedDuration = maxDuration;

    if (session.CanAddOutput(output))
    {
        session.AddOutput(output);
    }

    session.SessionPreset = AVCaptureSession.PresetMedium;

    Console.WriteLine("About to start running session");
    session.StartRunning();

    // toggle recording button was pushed.
    btnStartRecording.TouchUpInside += startStopPushed;
    //Console.ReadLine ();
}
/// <summary>
/// Configures the back camera for bracketed still capture (JPEG output,
/// photo preset) and starts the session, then hands <paramref name="completion"/>
/// to PrepareBracketsWithCompletionHandler.
/// On failure the user is alerted and the completion handler is invoked with
/// success = false so callers are never left waiting.
/// </summary>
private void StartCameraWithCompletionHandler(Action<bool, NSError> completion)
{
    captureSession = new AVCaptureSession ();
    captureSession.BeginConfiguration ();

    captureDevice = CameraDeviceForPosition (AVCaptureDevicePosition.Back);
    if (captureDevice == null) {
        string message = "Error message back camera - not found";
        string title = "Error";
        ShowErrorMessage (message, title);
        // FIX: balance BeginConfiguration and report the failure to the
        // caller — the original returned without ever invoking completion.
        captureSession.CommitConfiguration ();
        if (completion != null)
            completion (false, null);
        return;
    }

    NSError error;
    AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice (captureDevice, out error);
    if (deviceInput == null) {
        Console.WriteLine ("This error should be handled appropriately in your app -- obtain device input: {0}", error);
        string message = "Error message back camera - can't open.";
        string title = "Error";
        ShowErrorMessage (message, title);
        captureSession.CommitConfiguration ();
        if (completion != null)
            completion (false, error);
        return;
    }
    captureSession.AddInput (deviceInput);

    stillImageOutput = new AVCaptureStillImageOutput ();
    //Or instead of JPEG, we can use one of the following pixel formats: BGRA, 420f output
    stillImageOutput.OutputSettings = new NSDictionary (AVVideo.CodecKey, AVVideo.CodecJPEG);
    captureSession.AddOutput (stillImageOutput);
    cameraPreviewView.ConfigureCaptureSession (captureSession, stillImageOutput);

    captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
    captureDeviceFormat = captureDevice.ActiveFormat;
    captureSession.CommitConfiguration ();
    captureSession.StartRunning ();

    maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
    PrepareBracketsWithCompletionHandler (completion);
}
/// <summary>
/// Initializes the camera preview: selects the back camera (falling back to
/// the first video device), caps the frame rate at 24 fps, wires a 32BGRA
/// video-data output to the OutputRecorder delegate, attaches the preview
/// layer, and starts the session. Returns silently when no camera exists.
/// </summary>
public void Initialize()
{
    this.Frame = new CGRect(new CGPoint(0, 0), new CGSize(mc_iPreviewWidth, mc_iPreviewHeight));

    m_AVCapSession = new AVCaptureSession();

    // Pick a camera: prefer the back camera, default to the first device.
    var arCamDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    if (arCamDevice.Length != 0)
    {
        m_AVCapDevice = arCamDevice[0];
        foreach (AVCaptureDevice camDevice in arCamDevice)
        {
            if (camDevice.Position == AVCaptureDevicePosition.Back)
            {
                m_AVCapDevice = camDevice;
            }
        }
        if (m_AVCapDevice == null)
        {
            m_AVCapDevice = arCamDevice[0];
        }
    }

    if (m_AVCapDevice == null)
    {
        // No camera available (e.g. simulator) — nothing to set up.
        return;
    }

    // Frame-rate configuration: 24 fps minimum frame duration.
    // FIX: only configure/unlock when the lock actually succeeded; the
    // original called UnlockForConfiguration after a failed lock.
    NSError device_error;
    if (m_AVCapDevice.LockForConfiguration(out device_error))
    {
        m_AVCapDevice.ActiveVideoMinFrameDuration = new CMTime(1, 24);
        m_AVCapDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine($"Error: {device_error.LocalizedDescription}");
        return;
    }

    try
    {
        NSError error;
        // FIX: use the overload that reports errors — the original called the
        // error-less FromDevice and then tested a variable that could never
        // have been assigned, making its error branch dead code.
        m_AVInput = AVCaptureDeviceInput.FromDevice(m_AVCapDevice, out error);
        if (m_AVInput == null)
        {
            if (error != null)
            {
                Console.WriteLine(error.ToString());
            }
        }
        else
        {
            m_AVCapSession.AddInput(m_AVInput);

            m_AVCapSession.BeginConfiguration();
            // FIX: the original only *queried* CanSetSessionPreset and threw
            // the answer away, so the preset was never actually applied.
            if (m_AVCapSession.CanSetSessionPreset(AVCaptureSession.PresetHigh))
            {
                m_AVCapSession.SessionPreset = AVCaptureSession.PresetHigh;
            }
            m_AVCapSession.CommitConfiguration();

            // 32BGRA frames, late frames dropped to keep the preview live.
            m_AVVideoOutput = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true,
                WeakVideoSettings = new CVPixelBufferAttributes
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            m_OutputRecorder = new OutputRecorder() { m_CustomCamera = m_CustomCamera };
            var Queue = new DispatchQueue("myQueue");
            m_AVVideoOutput.SetSampleBufferDelegateQueue(m_OutputRecorder, Queue);
            m_AVCapSession.AddOutput(m_AVVideoOutput);
        }

        m_prevAVLayer = new AVCaptureVideoPreviewLayer(m_AVCapSession)
        {
            Frame = new CGRect(new CGPoint(0, 0), new CGSize(mc_iPreviewWidth, mc_iPreviewHeight)),
            VideoGravity = AVLayerVideoGravity.ResizeAspectFill
        };
        Layer.AddSublayer(m_prevAVLayer);

        m_AVCapSession.StartRunning();
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
    }
    return;
}
// Tears down the live-capture pipeline in strict order: stop the session,
// remove the face-detection output, release the device's configuration lock,
// detach the zoom-related KVO observers, and finally drop both references.
// NOTE(review): this assumes setup previously called LockForConfiguration and
// added observers for "videoZoomFactor"/"rampingVideoZoom" — confirm teardown
// only runs after a successful setup, otherwise Unlock/RemoveObserver throws.
void teardownAVCapture()
{
	session.StopRunning ();
	teardownAVFoundationFaceDetection ();
	// Release the configuration lock taken during setup.
	device.UnlockForConfiguration ();
	// Remove KVO observers before letting go of the device.
	device.RemoveObserver (this, (NSString) "videoZoomFactor");
	device.RemoveObserver (this, (NSString) "rampingVideoZoom");
	device = null;
	session = null;
}
/// <summary>
/// Builds the barcode-scanning screen: checks camera authorization, attaches
/// the camera input and a metadata output (QR / Code128 / UPC-E / EAN-13),
/// lays a preview layer plus overlay images into the view, and installs a
/// "Done" button that hands the collected barcode buffer to the callback.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    View.BackgroundColor = UIColor.Black;
    NSError error = null;
    _captureSession = new AVCaptureSession();
    CameraMetaDataDelegate del = null;
    var authStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
    AVCaptureDevice captureDevice = null;
    // check authorization status
    if (authStatus == AVAuthorizationStatus.Authorized)
    {
        captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video); // update for iOS 13
    }
    else if (authStatus == AVAuthorizationStatus.NotDetermined)
    {
        // NOTE(review): when access is granted here, captureDevice is still
        // null for the rest of this method, so the scanner stays inactive
        // until the next visit to this screen — confirm this is intended.
        AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, (granted) =>
        {
            if (!granted)
            {
                iApp.Log.Error("ViewDidLoadBase ScanLayer RequestAccessForMediaType not granted!");
            }
            else
            {
                iApp.Log.Error("ViewDidLoadBase ScanLayer RequestAccessForMediaType granted!");
            }
        });
    }
    else
    {
        iApp.Log.Error("Not Authorized! Status: " + authStatus.ToString());
    }
    if (captureDevice != null)
    {
        // Camera input; on failure we log and continue with an input-less session.
        var videoInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error);
        if (videoInput != null)
        {
            _captureSession.AddInput(videoInput);
        }
        else
        {
            iApp.Log.Error("Video capture error: " + error.LocalizedDescription);
        }
        // Metadata output must be added to the session BEFORE restricting
        // MetadataObjectTypes; the delegate receives hits on the main queue.
        var metaDataOutput = new AVCaptureMetadataOutput();
        _captureSession.AddOutput(metaDataOutput);
        del = new CameraMetaDataDelegate(this, _layer);
        metaDataOutput.SetDelegate(del, CoreFoundation.DispatchQueue.MainQueue);
        //metaDataOutput.MetadataObjectTypes = metaDataOutput.AvailableMetadataObjectTypes;
        metaDataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode | AVMetadataObjectType.Code128Code | AVMetadataObjectType.UPCECode | AVMetadataObjectType.EAN13Code;
        // Full-screen live preview, oriented to the current interface orientation.
        _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
        {
            Frame = View.Bounds,
            Orientation = (AVCaptureVideoOrientation)InterfaceOrientation,
        };
        View.Layer.AddSublayer(_videoPreviewLayer);
        // Aiming overlay centered over the preview.
        var image = TouchStyle.ImageFromResource("barcode-overlay-sm.png");
        _imageOverlay = new UIImageView(image)
        {
            Frame = View.Frame,
            ContentMode = UIViewContentMode.Center,
            AutoresizingMask = UIViewAutoresizing.FlexibleMargins,
        };
        View.Add(_imageOverlay);
        // preload this, and display when scan event occurs
        var imageScanBlocked = TouchStyle.ImageFromResource("barcode-scanblocked-sm.png");
        _imageOverlayScanBlocked = new UIImageView(imageScanBlocked)
        {
            Frame = View.Frame,
            ContentMode = UIViewContentMode.Center,
            AutoresizingMask = UIViewAutoresizing.FlexibleMargins,
            Hidden = true,
        };
        View.Add(_imageOverlayScanBlocked);
    }
    else
    {
        //TODO: Add "Scanner currently not active overlay Image"
        iApp.Log.Error("null capture device!");
    }
    // Strip at the bottom of the screen showing the most recent scan.
    nfloat startVerticalLoc = UIScreen.MainScreen.Bounds.Height - LastValueScanOverlay.ViewHeight;
    _lastScanOverlay = new LastValueScanOverlay(startVerticalLoc, _layerFont);
    View.Add(_lastScanOverlay);
    // "Done": join every buffered barcode with CRLF, hand the result to the
    // navigation callback, and dismiss the modal scanner.
    NavigationItem.LeftBarButtonItem = new UIBarButtonItem("Done", UIBarButtonItemStyle.Done, delegate
    {
        string scannedBarcodes = string.Empty;
        if (del != null && del.Buffer != null && del.Buffer.CurrentBuffer != null)
        {
            foreach (var s in del.Buffer.CurrentBuffer)
            {
                scannedBarcodes += s + "\r\n";
            }
        }
        if (_callback.Parameters == null)
        {
            _callback.Parameters = new Dictionary <string, string>();
        }
        _callback.Parameters[_barcodeValueKey] = scannedBarcodes;
        iApp.Navigate(_callback);
        ModalManager.EnqueueModalTransition(TouchFactory.Instance.TopViewController, null, true);
    });
}
/// <summary>
/// Creates the audio + video capture pipeline used by RosyWriter: separate
/// GCD queues deliver audio and video sample buffers to this object, so slow
/// video processing cannot back up (and drop) audio delivery.
/// </summary>
/// <returns>false when a required capture device is missing (e.g. simulator).</returns>
bool SetupCaptureSession()
{
    //Console.WriteLine ("SetupCaptureSession");
    // Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
    // is used to deliver both audio and video buffers, and our video processing consistently takes
    // too long, the delivery queue can back up, resulting in audio being dropped.
    //
    // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
    // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
    //
    // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

    // Create Capture session
    captureSession = new AVCaptureSession();
    captureSession.BeginConfiguration();

    // Create audio connection
    NSError error;
    var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
    if (audioDevice == null)
    {
        // FIX: balance BeginConfiguration before bailing out.
        captureSession.CommitConfiguration();
        return false; // e.g. simulator
    }

    var audioIn = new AVCaptureDeviceInput(audioDevice, out error);
    if (captureSession.CanAddInput(audioIn))
    {
        captureSession.AddInput(audioIn);
    }

    var audioOut = new AVCaptureAudioDataOutput();
    var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");
    // Add the Delegate to capture each sample that comes through
    audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);
    if (captureSession.CanAddOutput(audioOut))
    {
        captureSession.AddOutput(audioOut);
    }
    audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

    // Create Video Session
    var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
    // FIX: guard against a missing back camera — constructing the input from
    // a null device would throw instead of failing gracefully.
    if (videoDevice == null)
    {
        captureSession.CommitConfiguration();
        return false;
    }

    var videoIn = new AVCaptureDeviceInput(videoDevice, out error);
    if (captureSession.CanAddInput(videoIn))
    {
        captureSession.AddInput(videoIn);
    }

    // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
    // processing can take longer than real-time on some platforms (such as iPhone 3GS).
    // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
    // alwaysDiscardsLateVideoFrames property to NO.
    var videoOut = new AVCaptureVideoDataOutput
    {
        AlwaysDiscardsLateVideoFrames = true,
        VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA)
    };

    // Create a DispatchQueue for the Video Processing
    var videoCaptureQueue = new DispatchQueue("Video Capture Queue");
    videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);
    if (captureSession.CanAddOutput(videoOut))
    {
        captureSession.AddOutput(videoOut);
    }

    // Set the Video connection from the Video Output object
    videoConnection = videoOut.ConnectionFromMediaType(AVMediaType.Video);
    videoOrientation = videoConnection.VideoOrientation;

    captureSession.CommitConfiguration();
    return true;
}
/// <summary>
/// Configures the full ZXing scanning pipeline on the classic (MonoTouch)
/// API: selects a camera per options, installs an AVCaptureMetadataOutput
/// whose delegate forwards the first barcode hit to resultCallback as a
/// ZXing.Result, lays out the preview layer, starts the session, and finally
/// tunes focus/exposure/white-balance for close-range scanning.
/// </summary>
/// <returns>false when no camera or input is available (e.g. simulator).</returns>
bool SetupCaptureSession()
{
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    // Walk the devices: prefer the front camera only when explicitly
    // requested, otherwise stop at the back camera.
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    foreach (var device in devices)
    {
        captureDevice = device;
        if (options.UseFrontCameraIfAvailable.HasValue &&
            options.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        // Still show the overlay so the UI is not blank on the simulator.
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }

    foundResult = false;
    //Detect barcodes with built in avcapture stuff
    AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput();

    // Delegate fires on every metadata batch; only the FIRST readable code is
    // reported (foundResult latches), converted to a ZXing.Result.
    var dg = new CaptureDelegate(metaDataObjects =>
    {
        if (foundResult)
        {
            return;
        }
        //Console.WriteLine("Found MetaData Objects");
        var mdo = metaDataObjects.FirstOrDefault();
        if (mdo == null)
        {
            return;
        }
        var readableObj = mdo as AVMetadataMachineReadableCodeObject;
        if (readableObj == null)
        {
            return;
        }
        foundResult = true;
        //Console.WriteLine("Barcode: " + readableObj.StringValue);
        var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());
        var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);
        resultCallback(rs);
    });

    metadataOutput.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
    session.AddOutput(metadataOutput);

    //Setup barcode formats
    // NOTE: MetadataObjectTypes must be set AFTER the output is added to the
    // session, otherwise the requested types are unavailable.
    if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
    {
        var formats = new List <string> ();
        foreach (var f in ScanningOptions.PossibleFormats)
        {
            formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f));
        }
        metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray();
    }
    else
    {
        metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
    }

    previewLayer = new AVCaptureVideoPreviewLayer(session);

    //Framerate set here (15 fps)
    // NOTE(review): the comment says 15 fps but both branches configure
    // 10 fps (CMTime(1, 10)) — confirm which rate is intended.
    if (previewLayer.RespondsToSelector(new Selector("connection")))
    {
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
        {
            // iOS 7+: frame rate lives on the device, guarded by the config lock.
            var perf1 = PerformanceCounter.Start();
            NSError lockForConfigErr = null;
            captureDevice.LockForConfiguration(out lockForConfigErr);
            if (lockForConfigErr == null)
            {
                captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
                captureDevice.UnlockForConfiguration();
            }
            PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
        }
        else
        {
            // Pre-iOS 7 fallback: set it on the preview connection instead.
            previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
        }
    }

    // Preview fills this view, centered, hosted in a resizable child view.
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = new RectangleF(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));
    layerView = new UIView(new RectangleF(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);
    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);
        //overlayView.LayoutSubviews ();
    }

    session.StartRunning();
    Console.WriteLine("RUNNING!!!");
    //output.AlwaysDiscardsLateVideoFrames = true;
    Console.WriteLine("SetupCamera Finished");
    //session.AddOutput (output);
    //session.StartRunning ();

    // Camera tuning for close-range barcode scanning: continuous auto focus /
    // exposure / white balance (with one-shot fallbacks), near-range focus
    // restriction on iOS 7+, and both points of interest centered.
    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
    {
        NSError err = null;
        if (captureDevice.LockForConfiguration(out err))
        {
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ModeAutoFocus;
            }
            if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            }
            else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
            }
            if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
            }
            else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
            }
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
            {
                captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            }
            if (captureDevice.FocusPointOfInterestSupported)
            {
                captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
            }
            if (captureDevice.ExposurePointOfInterestSupported)
            {
                captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
            }
            captureDevice.UnlockForConfiguration();
        }
        else
        {
            Console.WriteLine("Failed to Lock for Config: " + err.Description);
        }
    }

    return(true);
}
/// <summary>
/// Configures a medium-preset session on the front camera (falling back to
/// the default video device), caps capture at 15 fps on iOS 7+, routes
/// 32BGRA frames to an OutputRecorder on a private queue, and starts running.
/// </summary>
/// <returns>false when no camera/input exists or the device can't be configured.</returns>
bool SetupCaptureSession()
{
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.PresetMedium
    };

    // Prefer the front camera when one exists.
    AVCaptureDevice captureDevice = null;
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    foreach (AVCaptureDevice Device in videoDevices)
    {
        if (Device.Position == AVCaptureDevicePosition.Front)
        {
            captureDevice = Device;
            break;
        }
    }

    // create a device input and attach it to the session
    if (captureDevice == null)
    {
        captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    }
    if (captureDevice == null)
    {
        return false;
    }

    //Configure for 15 FPS. Note use of LockForConigfuration()/UnlockForConfiguration()
    NSError error = null;
    // FIX: test the lock's boolean return instead of the error object, and
    // never call UnlockForConfiguration on a lock that was not acquired
    // (the original unlocked after a failed lock).
    if (!captureDevice.LockForConfiguration(out error))
    {
        return false;
    }
    if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
    {
        captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
    }
    captureDevice.UnlockForConfiguration();

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        return false;
    }
    session.AddInput(input);

    // create a VideoDataOutput and add it to the sesion
    var output = new AVCaptureVideoDataOutput()
    {
        VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA),
    };

    // configure the output
    queue = new MonoTouch.CoreFoundation.DispatchQueue("myQueue");
    outputRecorder = new OutputRecorder(_state);
    output.SetSampleBufferDelegate(outputRecorder, queue);
    session.AddOutput(output);

    session.StartRunning();
    return true;
}
/// <summary>
/// Creates a preview layer bound to <paramref name="session"/>, delegating to
/// the designated constructor with InitMode.WithConnection so the layer's
/// connection to the session is established at initialization time.
/// </summary>
public AVCaptureVideoPreviewLayer(AVCaptureSession session) : this(session, InitMode.WithConnection) { }
//Protected
/// <summary>
/// Builds a medium-preset capture session wired to the default video device,
/// delivering 32BGRA sample buffers to this instance on a private queue.
/// Returns null (after alerting the user) when no camera or input is
/// available — e.g. on the simulator.
/// </summary>
protected AVCaptureSession MaybeInitializeSession()
{
    //Create the capture session
    var captureSession = new AVCaptureSession
    {
        SessionPreset = AVCaptureSession.PresetMedium
    };

    //Setup the video capture
    var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (videoDevice == null)
    {
        Failure.Alert("No captureDevice - this won't work on the simulator, try a physical device");
        return null;
    }

    var deviceInput = AVCaptureDeviceInput.FromDevice(videoDevice);
    if (deviceInput == null)
    {
        Failure.Alert("No input - this won't work on the simulator, try a physical device");
        return null;
    }
    captureSession.AddInput(deviceInput);

    // Frames arrive as 32-bit BGRA buffers; this object is the sample-buffer
    // delegate, serviced on its own dispatch queue.
    var frameOutput = new AVCaptureVideoDataOutput()
    {
        VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA),
    };
    var deliveryQueue = new MonoTouch.CoreFoundation.DispatchQueue("myQueue");
    frameOutput.SetSampleBufferDelegate(this, deliveryQueue);
    captureSession.AddOutput(frameOutput);

    return captureSession;
}
/// <summary>
/// Configures the ZXing scanning pipeline (unified API): selects a camera per
/// ScanningOptions, shows a live preview, routes 32BGRA frames through an
/// OutputRecorder that decodes with a BarcodeReader (rotating the buffer when
/// needed) and reports hits via resultCallback, then tunes focus / exposure /
/// white balance for close-range scanning. Heavily instrumented with
/// PerformanceCounter timings.
/// </summary>
/// <returns>false when no camera or input is available (e.g. simulator).</returns>
bool SetupCaptureSession()
{
    var started = DateTime.UtcNow;
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.PresetPhoto
    };

    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    // Walk the devices: prefer the front camera only when explicitly
    // requested, otherwise stop at the back camera.
    captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    foreach (var device in devices)
    {
        captureDevice = device;
        if (ScanningOptions.UseFrontCameraIfAvailable.HasValue &&
            ScanningOptions.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!ScanningOptions.UseFrontCameraIfAvailable.HasValue || !ScanningOptions.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        // Still attach the overlay so the UI is not blank on the simulator.
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }

    var startedAVPreviewLayerAlloc = PerformanceCounter.Start();
    previewLayer = new AVCaptureVideoPreviewLayer(session);
    PerformanceCounter.Stop(startedAVPreviewLayerAlloc, "Alloc AVCaptureVideoPreviewLayer took {0} ms.");

    var perf2 = PerformanceCounter.Start();
    // Unified vs classic API spell the gravity property differently.
#if __UNIFIED__
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
    // Preview fills this view, centered, hosted in a resizable child view.
    previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));
    layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);
    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);
        //overlayView.LayoutSubviews ();
    }
    PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

    var perf3 = PerformanceCounter.Start();
    session.StartRunning();
    PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

    var perf4 = PerformanceCounter.Start();
    var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA),
                                                      CVPixelBuffer.PixelFormatTypeKey);

    // create a VideoDataOutput and add it to the sesion
    output = new AVCaptureVideoDataOutput
    {
        WeakVideoSettings = videoSettings
    };

    // configure the output
    queue = new DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

    var barcodeReader = ScanningOptions.BuildBarcodeReader();

    // Per-frame decode callback: returns true when a barcode was found
    // (which also reports it through resultCallback). Frames are skipped
    // entirely while IsAnalyzing is false, and rotated when the preview
    // buffer orientation requires it.
    outputRecorder = new OutputRecorder(this, img =>
    {
        var ls = img;

        if (!IsAnalyzing)
        {
            return(false);
        }

        try
        {
            var perfDecode = PerformanceCounter.Start();

            if (shouldRotatePreviewBuffer)
            {
                ls = ls.rotateCounterClockwise();
            }

            var result = barcodeReader.Decode(ls);

            PerformanceCounter.Stop(perfDecode, "Decode Time: {0} ms");

            if (result != null)
            {
                resultCallback(result);
                return(true);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("DECODE FAILED: " + ex);
        }

        return(false);
    });

    output.AlwaysDiscardsLateVideoFrames = true;
    output.SetSampleBufferDelegate(outputRecorder, queue);

    PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms.");

    session.AddOutput(output);
    //session.StartRunning ();

    // Camera tuning for close-range barcode scanning: continuous auto focus /
    // exposure / white balance (with one-shot fallbacks) unless autofocus is
    // disabled, near-range restriction on iOS 7+, centered points of interest.
    var perf5 = PerformanceCounter.Start();
    NSError err = null;
    if (captureDevice.LockForConfiguration(out err))
    {
        if (ScanningOptions.DisableAutofocus)
        {
            captureDevice.FocusMode = AVCaptureFocusMode.Locked;
        }
        else
        {
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
            }
        }
        if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        }
        else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
        }
        if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
        }
        else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
        }
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
        {
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
        }
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
        }
        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
        }
        captureDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine("Failed to Lock for Config: " + err.Description);
    }
    PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

    return(true);
}
/// <summary>
/// Creates a video-capture pipeline that delivers sample buffers to the
/// supplied delegate, then configures the camera via SetupCamera().
/// </summary>
/// <param name="delegateObject">Receiver for captured video sample buffers.</param>
public VideoCapture(IAVCaptureVideoDataOutputSampleBufferDelegate delegateObject)
{
    this.Delegate = delegateObject;
    this.Session = new AVCaptureSession();
    this.SetupCamera();
}
/// <summary>
/// Initializes the barcode scanner.
/// </summary>
/// <param name="barcodeFormat">Barcode formats to recognize.</param>
/// <param name="sessionPreset">Capture resolution preset.</param>
/// <returns>true when the scanner pipeline was fully configured.</returns>
private bool InitScanner(AVMetadataObjectType barcodeFormat, NSString sessionPreset)
{
    // Verify camera permission first.
    if (!IsCameraAuthorized)
    {
        this.Write("카메라 사용이 허용되지 않습니다.");
        return false;
    }

    // Capture from the rear camera.
    Device = AVCaptureDevice
             .DevicesWithMediaType(AVMediaType.Video)
             .FirstOrDefault(e => e.Position == AVCaptureDevicePosition.Back);
    if (Device == null)
    {
        this.Write("후면 카메라가 없습니다.");
        return false;
    }

    // Input side of the pipeline.
    Input = AVCaptureDeviceInput.FromDevice(Device);
    if (Input == null)
    {
        this.Write("AVCaptureDeviceInput이 null 입니다.");
        return false;
    }

    // Output side: forward every readable code to the BarcodeDetected event.
    CaptureDelegate = new CaptureDelegate((detectedObjects) =>
    {
        if (BarcodeDetected == null)
        {
            return;
        }
        foreach (var detected in detectedObjects)
        {
            var payload = ((AVMetadataMachineReadableCodeObject)detected).StringValue;
            BarcodeDetected?.Invoke(new BarcodeData(detected.Type.ToTmonFormat(), payload));
        }
    });
    Output = new AVCaptureMetadataOutput();
    Output.SetDelegate(CaptureDelegate, DispatchQueue.MainQueue);

    // Session wiring.
    Session = new AVCaptureSession()
    {
        SessionPreset = sessionPreset,
    };
    Session.AddInput(Input);
    Session.AddOutput(Output);

    // Formats to detect (important: must be set AFTER the session wiring).
    Output.MetadataObjectTypes = barcodeFormat;

    // Preview layer setup.
    Previewer = AVCaptureVideoPreviewLayer.FromSession(Session);
    Previewer.Frame = CGRect.Empty;
    Previewer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    Previewer.Connection.VideoOrientation = DeviceOrientation;

    return true;
}
// Builds the complete AVFoundation pipeline for ZXing barcode scanning:
// session + preset selection, front/back camera choice, preview layer,
// a video-data output whose frames are decoded by a ZXing BarcodeReader,
// and continuous focus/exposure/white-balance configuration.
// Returns false when no camera or input is available (e.g. the simulator).
bool SetupCaptureSession()
{
    var started = DateTime.UtcNow; // NOTE(review): captured but never read afterwards

    var availableResolutions = new List <CameraResolution> ();

    // Preset -> pixel-dimension table; used both to filter what the device
    // supports and to map the user-chosen resolution back to a preset string.
    var consideredResolutions = new Dictionary <NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } },   //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

    // Walk the devices; the loop leaves the *last enumerated* device selected
    // when neither position condition breaks out of it.
    foreach (var device in devices)
    {
        captureDevice = device;
        if (options.UseFrontCameraIfAvailable.HasValue &&
            options.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        // Still attach the overlay so the UI is not blank on the simulator.
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }

    CameraResolution resolution = null;

    // Find resolution
    // Go through the resolutions we can even consider
    foreach (var cr in consideredResolutions)
    {
        // Now check to make sure our selected device supports the resolution
        // so we can add it to the list to pick from
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }

    resolution = options.GetResolution(availableResolutions);

    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width &&
                            c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();

        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            session.SessionPreset = preset;
        }
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }

    var startedAVPreviewLayerAlloc = DateTime.UtcNow;
    previewLayer = new AVCaptureVideoPreviewLayer(session);
    var totalAVPreviewLayerAlloc = DateTime.UtcNow - startedAVPreviewLayerAlloc;
    Console.WriteLine("PERF: Alloc AVCaptureVideoPreviewLayer took {0} ms.", totalAVPreviewLayerAlloc.TotalMilliseconds);

    // //Framerate set here (15 fps)
    // if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
    // {
    //     var perf1 = PerformanceCounter.Start ();
    //     NSError lockForConfigErr = null;
    //     captureDevice.LockForConfiguration (out lockForConfigErr);
    //     if (lockForConfigErr == null)
    //     {
    //         captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 10);
    //         captureDevice.UnlockForConfiguration ();
    //     }
    //     PerformanceCounter.Stop (perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
    // }
    // else
    //     previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);

    var perf2 = PerformanceCounter.Start();
// Unified API uses VideoGravity; classic MonoTouch spelled it LayerVideoGravity.
#if __UNIFIED__
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
    previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    // Host the preview layer in its own autoresizing view so rotation resizes it.
    layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);

    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);

        //overlayView.LayoutSubviews ();
    }

    PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

    var perf3 = PerformanceCounter.Start();
    // NOTE(review): the session is started *before* the video output is added
    // below; the output is hot-attached to a running session — confirm intended.
    session.StartRunning();
    PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

    var perf4 = PerformanceCounter.Start();

    // 32BGRA is the pixel format the ZXing luminance source consumes.
    var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA),
                                                      CVPixelBuffer.PixelFormatTypeKey);

    // create a VideoDataOutput and add it to the sesion
    output = new AVCaptureVideoDataOutput
    {
        WeakVideoSettings = videoSettings
    };

    // configure the output
    queue = new DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

    var barcodeReader = new BarcodeReaderiOS(null, (img) =>
    {
        var src = new RGBLuminanceSourceiOS(img); //, bmp.Width, bmp.Height);

        //Don't try and rotate properly if we're autorotating anyway
        if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value)
        {
            return(src);
        }

        var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait;
        InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation);

        // Rotate the luminance source so the decoder sees upright bars;
        // landscape frames already match the sensor orientation.
        switch (tmpInterfaceOrientation)
        {
            case UIInterfaceOrientation.Portrait:
                return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());
            case UIInterfaceOrientation.PortraitUpsideDown:
                return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());
            case UIInterfaceOrientation.LandscapeLeft:
                return(src);
            case UIInterfaceOrientation.LandscapeRight:
                return(src);
        }

        return(src);
    }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

    // Propagate the user-supplied scanning options onto the reader.
    if (ScanningOptions.TryHarder.HasValue)
    {
        Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value);
        barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value;
    }
    if (ScanningOptions.PureBarcode.HasValue)
    {
        barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value;
    }
    if (ScanningOptions.AutoRotate.HasValue)
    {
        Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value);
        barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value;
    }
    if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet))
    {
        barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet;
    }
    if (ScanningOptions.TryInverted.HasValue)
    {
        barcodeReader.TryInverted = ScanningOptions.TryInverted.Value;
    }

    if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
    {
        barcodeReader.Options.PossibleFormats = new List <BarcodeFormat>();

        foreach (var pf in ScanningOptions.PossibleFormats)
        {
            barcodeReader.Options.PossibleFormats.Add(pf);
        }
    }

    // The recorder hands each captured frame to the decode callback; returning
    // true signals a successful decode.
    outputRecorder = new OutputRecorder(ScanningOptions, img =>
    {
        if (!IsAnalyzing)
        {
            return(false);
        }

        try
        {
            //var sw = new System.Diagnostics.Stopwatch();
            //sw.Start();

            var rs = barcodeReader.Decode(img);

            //sw.Stop();
            //Console.WriteLine("Decode Time: {0} ms", sw.ElapsedMilliseconds);

            if (rs != null)
            {
                resultCallback(rs);
                return(true);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("DECODE FAILED: " + ex);
        }

        return(false);
    });

    output.AlwaysDiscardsLateVideoFrames = true;
    output.SetSampleBufferDelegate(outputRecorder, queue);

    PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms.");

    session.AddOutput(output);
    //session.StartRunning ();

    var perf5 = PerformanceCounter.Start();

    // Prefer continuous focus/exposure/white-balance, falling back to one-shot
    // modes when the hardware does not support continuous.
    NSError err = null;
    if (captureDevice.LockForConfiguration(out err))
    {
        if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
        }
        else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
        }

        if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        }
        else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
        }

        if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
        }
        else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
        }

        // Barcodes are held close to the lens, so restrict autofocus to near
        // range on iOS 7+ hardware that supports it.
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
        {
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
        }

        // Bias focus/exposure toward the frame centre where the code sits.
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
        }

        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
        }

        captureDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine("Failed to Lock for Config: " + err.Description);
    }

    PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

    return(true);
}
// Configures the shared capture pipeline at application launch: a medium-preset
// session, a 15 fps camera input, a BGRA video-data output feeding Recorder on
// its own queue, and a JPEG still-image output. Sets CameraAvailable so the
// tabs know whether capture is usable (false on the simulator or on any
// configuration failure).
public override void FinishedLaunching(UIApplication application)
{
    // Create a new capture session
    Session = new AVCaptureSession();
    Session.SessionPreset = AVCaptureSession.PresetMedium;

    // Create a device input
    CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (CaptureDevice == null)
    {
        // Video capture not supported, abort
        Console.WriteLine("Video recording not supported on this device");
        CameraAvailable = false;
        return;
    }

    // Prepare device for configuration
    if (!CaptureDevice.LockForConfiguration(out Error))
    {
        // There has been an issue, abort.
        // FIX: previously this called UnlockForConfiguration() even though the
        // lock was never acquired, and left CameraAvailable unset.
        Console.WriteLine("Error: {0}", Error.LocalizedDescription);
        CameraAvailable = false;
        return;
    }

    // Configure stream for 15 frames per second (fps)
    CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);

    // Unlock configuration
    CaptureDevice.UnlockForConfiguration();

    // Get input from capture device
    Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);
    if (Input == null)
    {
        // Error, report and abort
        Console.WriteLine("Unable to gain input from capture device.");
        CameraAvailable = false;
        return;
    }

    // Attach input to session
    Session.AddInput(Input);

    // Create a new output delivering BGRA frames (what the recorder expects)
    var output = new AVCaptureVideoDataOutput();
    var settings = new AVVideoSettingsUncompressed();
    settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
    output.WeakVideoSettings = settings.Dictionary;

    // Configure and attach to the output to the session
    Queue = new DispatchQueue("ManCamQueue");
    Recorder = new OutputRecorder();
    output.SetSampleBufferDelegate(Recorder, Queue);
    Session.AddOutput(output);

    // Configure and attach a still image output for bracketed capture
    StillImageOutput = new AVCaptureStillImageOutput();
    var dict = new NSMutableDictionary();
    dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    // FIX: the codec settings were previously built but never applied to the
    // output, so the JPEG configuration was silently discarded.
    StillImageOutput.OutputSettings = dict;
    Session.AddOutput(StillImageOutput);

    // Let tabs know that a camera is available
    CameraAvailable = true;
}
// Creates the capture session with the requested quality preset and wires up
// only the inputs/outputs the current capture flags ask for. On the first
// failing stage, stops and reports that stage's message through errorMessage.
// Returns true when every requested stage was configured.
private bool setupCaptureSessionInternal(out string errorMessage)
{
    errorMessage = "";

    // Fresh session; map the requested resolution onto an AVFoundation preset.
    // Anything other than Low/High falls back to Medium.
    session = new AVCaptureSession();
    switch (resolution)
    {
    case Resolution.Low:
        session.SessionPreset = AVCaptureSession.PresetLow;
        break;

    case Resolution.High:
        session.SessionPreset = AVCaptureSession.PresetHigh;
        break;

    default:
        session.SessionPreset = AVCaptureSession.PresetMedium;
        break;
    }

    // The camera feeds both movie recording and still-image sampling.
    if ((captureVideo || captureImages) && !addCameraInput(out errorMessage))
    {
        return(false);
    }

    // Microphone is only needed when audio capture was requested.
    if (captureAudio && !addAudioInput(out errorMessage))
    {
        return(false);
    }

    // Sample buffers: throttle sampling way down while a movie is also
    // being recorded (1000 ms vs 100 ms between images).
    if (captureImages)
    {
        var minimumSampleIntervalInMilliSeconds = captureVideo ? 1000 : 100;
        if (!addImageSamplerOutput(out errorMessage, minimumSampleIntervalInMilliSeconds))
        {
            return(false);
        }
    }

    // Movie file output is the last stage to attach.
    if (shouldRecord && !addMovieFileOutput(out errorMessage))
    {
        return(false);
    }

    return(true);
}
// Builds an AVFoundation scanning pipeline that uses the platform's own
// AVCaptureMetadataOutput barcode detection (no ZXing decode loop): session +
// preset selection, camera choice, metadata delegate that converts the first
// machine-readable code into a ZXing.Result, preview layer, and focus/exposure
// configuration. Returns false when no camera or input exists (simulator).
bool SetupCaptureSession()
{
    var availableResolutions = new List <CameraResolution> ();

    // Preset -> pixel-dimension table; used both to filter what the device
    // supports and to map the user-chosen resolution back to a preset string.
    var consideredResolutions = new Dictionary <NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } },   //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

    // Walk the devices; the loop leaves the *last enumerated* device selected
    // when neither position condition breaks out of it.
    foreach (var device in devices)
    {
        captureDevice = device;
        if (options.UseFrontCameraIfAvailable.HasValue &&
            options.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        // Still attach the overlay so the UI is not blank on the simulator.
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }

    CameraResolution resolution = null;

    // Find resolution
    // Go through the resolutions we can even consider
    foreach (var cr in consideredResolutions)
    {
        // Now check to make sure our selected device supports the resolution
        // so we can add it to the list to pick from
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }

    resolution = options.GetResolution(availableResolutions);

    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width &&
                            c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();

        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            session.SessionPreset = preset;
        }
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }

    // Latch so only the first detected code is reported back.
    foundResult = false;

    //Detect barcodes with built in avcapture stuff
    AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput();

    var dg = new CaptureDelegate(metaDataObjects =>
    {
        if (foundResult)
        {
            return;
        }

        //Console.WriteLine("Found MetaData Objects");

        var mdo = metaDataObjects.FirstOrDefault();
        if (mdo == null)
        {
            return;
        }

        var readableObj = mdo as AVMetadataMachineReadableCodeObject;
        if (readableObj == null)
        {
            return;
        }

        foundResult = true;

        //Console.WriteLine("Barcode: " + readableObj.StringValue);

        // Translate the AVFoundation metadata type into the ZXing format so
        // callers receive an ordinary ZXing.Result.
        var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());

        var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);

        resultCallback(rs);
    });

    metadataOutput.SetDelegate(dg, DispatchQueue.MainQueue);
    session.AddOutput(metadataOutput);

    //Setup barcode formats
    if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
    {
// Unified API uses a flags enum; classic MonoTouch uses NSString arrays.
#if __UNIFIED__
        var formats = AVMetadataObjectType.None;
        foreach (var f in ScanningOptions.PossibleFormats)
        {
            formats |= AVCaptureBarcodeFormatFromZXingBarcodeFormat(f);
        }
        // Strip the None sentinel back out of the accumulated flags.
        formats &= ~AVMetadataObjectType.None;
        metadataOutput.MetadataObjectTypes = formats;
#else
        var formats = new List <string> ();
        foreach (var f in ScanningOptions.PossibleFormats)
        {
            formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f));
        }
        metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray();
#endif
    }
    else
    {
        // No explicit formats requested: scan for everything the device offers.
        metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
    }

    previewLayer = new AVCaptureVideoPreviewLayer(session);

    //Framerate set here (15 fps)
    // NOTE(review): both branches actually configure a minimum of 10 fps
    // (CMTime(1, 10)); the "15 fps" comment does not match the code.
    if (previewLayer.RespondsToSelector(new Selector("connection")))
    {
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
        {
            // iOS 7+: frame duration must be set on the device, under a config lock.
            var perf1 = PerformanceCounter.Start();
            NSError lockForConfigErr = null;
            captureDevice.LockForConfiguration(out lockForConfigErr);
            if (lockForConfigErr == null)
            {
                captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
                captureDevice.UnlockForConfiguration();
            }
            PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
        }
        else
        {
            // Pre-iOS 7 fallback: set it on the preview layer's connection.
            previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
        }
    }

#if __UNIFIED__
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
    previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    // Host the preview layer in its own autoresizing view so rotation resizes it.
    layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);

    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);

        //overlayView.LayoutSubviews ();
    }

    session.StartRunning();

    Console.WriteLine("RUNNING!!!");

    //output.AlwaysDiscardsLateVideoFrames = true;

    Console.WriteLine("SetupCamera Finished");

    //session.AddOutput (output);
    //session.StartRunning ();

    // Prefer continuous focus/exposure/white-balance, falling back to one-shot
    // modes when the hardware does not support continuous.
    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        NSError err = null;
        if (captureDevice.LockForConfiguration(out err))
        {
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
            }

            if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            }
            else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
            }

            if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
            }
            else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
            }

            // Barcodes are held close to the lens, so restrict autofocus to
            // near range on iOS 7+ hardware that supports it.
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
            {
                captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            }

            // Bias focus/exposure toward the frame centre where the code sits.
            if (captureDevice.FocusPointOfInterestSupported)
            {
                captureDevice.FocusPointOfInterest = new CGPoint(0.5f, 0.5f);
            }

            if (captureDevice.ExposurePointOfInterestSupported)
            {
                captureDevice.ExposurePointOfInterest = new CGPoint(0.5f, 0.5f);
            }

            captureDevice.UnlockForConfiguration();
        }
        else
        {
            Console.WriteLine("Failed to Lock for Config: " + err.Description);
        }
    }

    return(true);
}
// Creates the capture session and stores it in the backing field.
// Medium preset trades resolution for throughput, suitable for preview.
void CreateSession()
{
    var newSession = new AVCaptureSession
    {
        SessionPreset = AVCaptureSession.PresetMedium
    };
    session = newSession;
}
// Builds the RosyWriter-style audio+video capture pipeline.
// Returns false when a required capture device is missing (e.g. simulator).
bool SetupCaptureSession ()
{
	//Console.WriteLine ("SetupCaptureSession");
	// Overview: RosyWriter uses separate GCD queues for audio and video capture. If a single GCD queue
	// is used to deliver both audio and video buffers, and our video processing consistently takes
	// too long, the delivery queue can back up, resulting in audio being dropped.
	//
	// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter. This ensures
	// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
	//
	// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

	// Create Capture session
	captureSession = new AVCaptureSession ();

	// FIX: resolve both devices *before* BeginConfiguration. The original
	// returned early after BeginConfiguration when the audio device was
	// missing, leaving the session stuck mid-configuration (no matching
	// CommitConfiguration), and never null-checked the video device, whose
	// AVCaptureDeviceInput constructor throws on null.
	var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
	if (audioDevice == null)
		return false; // e.g. simulator

	var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
	if (videoDevice == null)
		return false; // no back camera available

	captureSession.BeginConfiguration ();

	// Create audio connection
	NSError error;
	var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
	if (captureSession.CanAddInput (audioIn))
		captureSession.AddInput (audioIn);

	var audioOut = new AVCaptureAudioDataOutput ();
	var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

	// Add the Delegate to capture each sample that comes through
	audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);

	if (captureSession.CanAddOutput (audioOut))
		captureSession.AddOutput (audioOut);

	audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);

	// Create Video Session
	var videoIn = new AVCaptureDeviceInput (videoDevice, out error);

	if (captureSession.CanAddInput (videoIn))
		captureSession.AddInput (videoIn);

	// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
	// processing can take longer than real-time on some platforms (such as iPhone 3GS).
	// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
	// alwaysDiscardsLateVideoFrames property to NO.
	var videoOut = new AVCaptureVideoDataOutput {
		AlwaysDiscardsLateVideoFrames = true,
		VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
	};

	// Create a DispatchQueue for the Video Processing
	var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
	videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);

	if (captureSession.CanAddOutput (videoOut))
		captureSession.AddOutput (videoOut);

	// Set the Video connection from the Video Output object
	videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
	videoOrientation = videoConnection.VideoOrientation;

	captureSession.CommitConfiguration ();

	return true;
}
// Classic (pre-unified MonoTouch) variant of the ZXing scanner pipeline:
// fixed 640x480 preset, camera selection, preview layer, video-data output
// decoded by a ZXing BarcodeReader, and focus/exposure/white-balance setup.
// Returns false when no camera or input exists (e.g. the simulator).
bool SetupCaptureSession()
{
    var started = DateTime.UtcNow; // NOTE(review): captured but never read afterwards

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // create a device input and attach it to the session
    // var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

    // Walk the devices; the loop leaves the *last enumerated* device selected
    // when neither position condition breaks out of it.
    foreach (var device in devices)
    {
        captureDevice = device;
        if (options.UseFrontCameraIfAvailable.HasValue &&
            options.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        // Still attach the overlay so the UI is not blank on the simulator.
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            this.AddSubview(overlayView);
            this.BringSubviewToFront(overlayView);
        }
        return(false);
    }
    else
    {
        session.AddInput(input);
    }

    var startedAVPreviewLayerAlloc = DateTime.UtcNow;
    previewLayer = new AVCaptureVideoPreviewLayer(session);
    var totalAVPreviewLayerAlloc = DateTime.UtcNow - startedAVPreviewLayerAlloc;
    Console.WriteLine("PERF: Alloc AVCaptureVideoPreviewLayer took {0} ms.", totalAVPreviewLayerAlloc.TotalMilliseconds);

    //Framerate set here (15 fps)
    // NOTE(review): both branches actually configure a minimum of 10 fps
    // (CMTime(1, 10)); the "15 fps" comment does not match the code.
    if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
    {
        // iOS 7+: frame duration must be set on the device, under a config lock.
        var perf1 = PerformanceCounter.Start();
        NSError lockForConfigErr = null;
        captureDevice.LockForConfiguration(out lockForConfigErr);
        if (lockForConfigErr == null)
        {
            captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
            captureDevice.UnlockForConfiguration();
        }
        PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
    }
    else
    {
        // Pre-iOS 7 fallback: set it on the preview layer's connection.
        previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
    }

    var perf2 = PerformanceCounter.Start();
    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = new RectangleF(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    // Host the preview layer in its own autoresizing view so rotation resizes it.
    layerView = new UIView(new RectangleF(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);

    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    if (overlayView != null)
    {
        this.AddSubview(overlayView);
        this.BringSubviewToFront(overlayView);

        //overlayView.LayoutSubviews ();
    }

    PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

    var perf3 = PerformanceCounter.Start();
    // NOTE(review): the session is started *before* the video output is added
    // below; the output is hot-attached to a running session — confirm intended.
    session.StartRunning();
    PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

    var perf4 = PerformanceCounter.Start();

    // create a VideoDataOutput and add it to the sesion
    output = new AVCaptureVideoDataOutput()
    {
        //videoSettings
        // 32BGRA is the pixel format the ZXing luminance source consumes.
        VideoSettings = new AVVideoSettings(CVPixelFormatType.CV32BGRA),
    };

    // configure the output
    queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

    var barcodeReader = new BarcodeReader(null, (img) =>
    {
        var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height);

        //Don't try and rotate properly if we're autorotating anyway
        if (ScanningOptions.AutoRotate.HasValue && ScanningOptions.AutoRotate.Value)
        {
            return(src);
        }

        var tmpInterfaceOrientation = UIInterfaceOrientation.Portrait;
        InvokeOnMainThread(() => tmpInterfaceOrientation = UIApplication.SharedApplication.StatusBarOrientation);

        // Rotate the luminance source so the decoder sees upright bars;
        // landscape frames already match the sensor orientation.
        switch (tmpInterfaceOrientation)
        {
            case UIInterfaceOrientation.Portrait:
                return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());
            case UIInterfaceOrientation.PortraitUpsideDown:
                return(src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise());
            case UIInterfaceOrientation.LandscapeLeft:
                return(src);
            case UIInterfaceOrientation.LandscapeRight:
                return(src);
        }

        return(src);
    }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

    // Propagate the user-supplied scanning options onto the reader.
    if (ScanningOptions.TryHarder.HasValue)
    {
        Console.WriteLine("TRY_HARDER: " + ScanningOptions.TryHarder.Value);
        barcodeReader.Options.TryHarder = ScanningOptions.TryHarder.Value;
    }
    if (ScanningOptions.PureBarcode.HasValue)
    {
        barcodeReader.Options.PureBarcode = ScanningOptions.PureBarcode.Value;
    }
    if (ScanningOptions.AutoRotate.HasValue)
    {
        Console.WriteLine("AUTO_ROTATE: " + ScanningOptions.AutoRotate.Value);
        barcodeReader.AutoRotate = ScanningOptions.AutoRotate.Value;
    }
    if (!string.IsNullOrEmpty(ScanningOptions.CharacterSet))
    {
        barcodeReader.Options.CharacterSet = ScanningOptions.CharacterSet;
    }
    if (ScanningOptions.TryInverted.HasValue)
    {
        barcodeReader.TryInverted = ScanningOptions.TryInverted.Value;
    }

    if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
    {
        barcodeReader.Options.PossibleFormats = new List <BarcodeFormat>();

        foreach (var pf in ScanningOptions.PossibleFormats)
        {
            barcodeReader.Options.PossibleFormats.Add(pf);
        }
    }

    // The recorder hands each captured frame to the decode callback.
    outputRecorder = new OutputRecorder(ScanningOptions, img =>
    {
        if (!IsAnalyzing)
        {
            return;
        }

        try
        {
            //var sw = new System.Diagnostics.Stopwatch();
            //sw.Start();

            var rs = barcodeReader.Decode(img);

            //sw.Stop();
            //Console.WriteLine("Decode Time: {0} ms", sw.ElapsedMilliseconds);

            if (rs != null)
            {
                resultCallback(rs);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("DECODE FAILED: " + ex);
        }
    });

    output.AlwaysDiscardsLateVideoFrames = true;
    output.SetSampleBufferDelegate(outputRecorder, queue);

    PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms.");

    session.AddOutput(output);
    //session.StartRunning ();

    var perf5 = PerformanceCounter.Start();

    // Prefer continuous focus/exposure/white-balance, falling back to one-shot
    // modes when the hardware does not support continuous.
    NSError err = null;
    if (captureDevice.LockForConfiguration(out err))
    {
        if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;
        }
        else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeAutoFocus))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.ModeAutoFocus;
        }

        if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        }
        else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
        }

        if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
        }
        else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
        }

        // Barcodes are held close to the lens, so restrict autofocus to near
        // range on iOS 7+ hardware that supports it.
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
        {
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
        }

        // Bias focus/exposure toward the frame centre where the code sits.
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
        }

        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
        }

        captureDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine("Failed to Lock for Config: " + err.Description);
    }

    PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

    return(true);
}
// Stops and disposes the capture session, releasing the camera.
// FIX: made idempotent — the original dereferenced 'session' unconditionally,
// so a second call (or a call before setup) threw a NullReferenceException.
public void StopCaptureSession()
{
    if (session == null)
    {
        return;
    }

    session.StopRunning();
    session.Dispose();
    session = null;
}
// Configures the capture session for movie recording with optional audio and
// still-image capture, and reports the outcome as an OperationResult.
// The method body is fully synchronous; the Task wrapper only lets callers
// await the result. Non-fatal problems are collected into 'warnings' and
// returned with the success result.
public Task <OperationResult> Setup(bool enableAudioRecording,
                                    bool enableStillImageCapture = false,
                                    UIInterfaceOrientation orientation = UIInterfaceOrientation.Portrait,
                                    int numberOfCameras = 1)
{
    TaskCompletionSource <OperationResult> tcs = new TaskCompletionSource <OperationResult>();
    var warnings = new List <string>();

    NumberOfCameras = numberOfCameras;
    _enableAudioRecording = enableAudioRecording;
    _enableStillImageCapture = enableStillImageCapture;
    _session = new AVCaptureSession();
    _backgroundRecordingID = -1; // "no background task" sentinel

    NSError error;

    var result = AVCaptureDeviceFactory.CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);
    if (!result.IsSuccessful)
    {
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
        tcs.SetResult(OperationResult.AsFailure("No video devices found, probably running in the simulator"));
        return(tcs.Task);
    }

    _videoDeviceInput = AVCaptureDeviceInput.FromDevice(result.Result, out error);
    if (_videoDeviceInput == null)
    {
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
        // NOTE(review): verbatim string — '{error}' is NOT interpolated (no '$');
        // the literal text "{error}" is what callers receive. Confirm intent.
        tcs.SetResult(OperationResult.AsFailure(@"Could not create video device input: {error}"));
        return(tcs.Task);
    }

    _session.BeginConfiguration();

    if (_session.CanAddInput(_videoDeviceInput))
    {
        _session.AddInput(_videoDeviceInput);

        // The cast relies on UIInterfaceOrientation and AVCaptureVideoOrientation
        // sharing raw values for the orientations used here.
        var initialVideoOrientation = (AVCaptureVideoOrientation)(long)orientation;
        PreviewLayer.Session = _session;
        PreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        PreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
    }
    else
    {
        // NOTE(review): this early return leaves BeginConfiguration without a
        // matching CommitConfiguration — confirm whether that is acceptable.
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
        tcs.SetResult(OperationResult.AsFailure("Could not add video device input to the session"));
        return(tcs.Task);
    }

    if (_enableAudioRecording)
    {
        AVCaptureDevice audioDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
        AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
        if (audioDeviceInput == null)
        {
            // Audio problems are non-fatal: recording proceeds without sound.
            // NOTE(review): same non-interpolated '{error}' literal as above.
            warnings.Add(@"Could not create audio device input: {error}");
        }
        else
        {
            if (_session.CanAddInput(audioDeviceInput))
            {
                _session.AddInput(audioDeviceInput);
            }
            else
            {
                warnings.Add("Could not add audio device input to the session");
            }
        }
    }

    _movieFileOutput = new AVCaptureMovieFileOutput();
    if (_session.CanAddOutput(_movieFileOutput))
    {
        _session.AddOutput(_movieFileOutput);

        // Let AVFoundation pick the stabilization mode when the connection
        // supports it.
        AVCaptureConnection connection = _movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
        if (connection.SupportsVideoStabilization)
        {
            connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
    }
    else
    {
        warnings.Add("Could not add movie file output to the session");
        _setupResult = CameraSetupResult.SessionConfigurationFailed;
    }

    if (_enableStillImageCapture)
    {
        _stillImageOutput = new AVCaptureStillImageOutput();
        if (_session.CanAddOutput(_stillImageOutput))
        {
            // Capture stills as JPEG.
            _stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed
            {
                Codec = AVVideoCodec.JPEG
            };
            _session.AddOutput(_stillImageOutput);
        }
        else
        {
            warnings.Add("Could not add still image output to the session");
            _setupResult = CameraSetupResult.SessionConfigurationFailed;
        }
    }

    _session.CommitConfiguration();

    // NOTE(review): this unconditionally overwrites any SessionConfigurationFailed
    // recorded by the output stages above, and the task still resolves as a
    // success (those failures surface only via 'warnings') — confirm intended.
    _setupResult = CameraSetupResult.Success;
    tcs.SetResult(OperationResult.AsSuccess(string.Empty, warnings));

    AddObservers();

    return(tcs.Task);
}
/// <summary>
/// Configures and starts the barcode-scanning capture session: picks a camera
/// (front or back per scanner options), selects the best supported resolution,
/// wires a preview layer into this view, attaches a BGRA video-data output,
/// and applies continuous autofocus/exposure/white-balance where supported.
/// </summary>
/// <returns>false when no decoder is configured or no camera/input is available; true otherwise.</returns>
private bool SetupCaptureSession()
{
    if (CameraPreviewSettings.Instance.Decoder == null)
    {
        return false;
    }

    var availableResolutions = new List<CameraResolution>();

    // Presets we are willing to use, mapped to their pixel dimensions.
    var consideredResolutions = new Dictionary<NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    _session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // Pick front or back camera according to the scanner options; the last
    // enumerated device wins if no position matches.
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    foreach (var device in devices)
    {
        captureDevice = device;
        if (CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.HasValue &&
            CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.HasValue ||
                  !CameraPreviewSettings.Instance.ScannerOptions.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera successfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        return false;
    }

    CameraResolution resolution = null;

    // Go through the resolutions we can even consider and keep those this device supports.
    foreach (var cr in consideredResolutions)
    {
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }

    resolution = CameraPreviewSettings.Instance.ScannerOptions.GetResolution(availableResolutions);

    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width && c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();

        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            _session.SessionPreset = preset;
        }
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        return false;
    }
    else
    {
        _session.AddInput(input);
    }

    var startedAvPreviewLayerAlloc = PerformanceCounter.Start();
    _previewLayer = new AVCaptureVideoPreviewLayer(_session);
    PerformanceCounter.Stop(startedAvPreviewLayerAlloc, "Alloc AVCaptureVideoPreviewLayer took {0} ms.");

    var perf2 = PerformanceCounter.Start();
#if __UNIFIED__
    _previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#else
    // BUG FIX: this branch referenced the undeclared identifier "previewLayer",
    // which could not compile when __UNIFIED__ is undefined; use the field.
    _previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
#endif
    _previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    _previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    _layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height))
    {
        AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight
    };
    _layerView.Layer.AddSublayer(_previewLayer);
    this.AddSubview(_layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);
    PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

    var perf3 = PerformanceCounter.Start();
    _session.StartRunning();
    PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

    var perf4 = PerformanceCounter.Start();

    // create a VideoDataOutput and add it to the session (BGRA frames for the decoder)
    var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA), CVPixelBuffer.PixelFormatTypeKey);
    _output = new AVCaptureVideoDataOutput
    {
        WeakVideoSettings = videoSettings
    };

    // configure the output
    _queue = new DispatchQueue("CamerPreviewView"); // (Guid.NewGuid().ToString());
    _outputRecorder = new DefaultOutputRecorder(_resultCallback);
    _output.AlwaysDiscardsLateVideoFrames = true;
    _output.SetSampleBufferDelegateQueue(_outputRecorder, _queue);

    PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms.");
    _session.AddOutput(_output);
    //session.StartRunning ();

    var perf5 = PerformanceCounter.Start();
    if (captureDevice.LockForConfiguration(out var err))
    {
        // Prefer continuous modes, fall back to one-shot auto modes where supported.
        if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
        }
        else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
        }

        if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        }
        else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
        }

        if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
        }
        else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
        }

        // Near-range focus restriction (barcodes are held close to the lens); iOS 7+.
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
        {
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
        }

        // Focus/expose on the center of the frame.
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
        }

        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
        }

        captureDevice.UnlockForConfiguration();
    }
    else
    {
        Logger.Log("Failed to Lock for Config: " + err.Description);
    }
    PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

    return true;
}
bool SetupCaptureSession() { session = new AVCaptureSession () { SessionPreset = AVCaptureSession.PresetMedium }; AVCaptureDevice[] capDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video); AVCaptureDeviceInput input = null; if (capDevices.Length != 0) input = AVCaptureDeviceInput.FromDevice (capDevices[0]); if (input == null){ new UIAlertView("Error", "Camera not available", null, "OK", null).Show(); Console.WriteLine ("Camera not available"); return false; } session.AddInput (input); var output = new AVCaptureVideoDataOutput () { VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA), MinFrameDuration = new CMTime (1, 30) //second parameter is frames per second }; queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue"); outputRecorder = new OutputRecorder (); output.SetSampleBufferDelegateAndQueue (outputRecorder, queue); session.AddOutput (output); session.StartRunning (); return true; }
/// <summary>
/// App launch: creates the window and root view controller, styles the tab bar,
/// then configures a shared AVCaptureSession (15 fps BGRA video frames plus a
/// JPEG still-image output) so the tabs can use the camera.
/// </summary>
/// <param name="application">The singleton app object.</param>
/// <param name="launchOptions">Launch options dictionary (unused here).</param>
/// <returns>false only when the capture device could not be configured or provide input; true otherwise.</returns>
public override bool FinishedLaunching(UIApplication application, NSDictionary launchOptions)
{
    // create a new window instance based on the screen size
    Window = new UIWindow(UIScreen.MainScreen.Bounds);
    Microsoft.WindowsAzure.MobileServices.CurrentPlatform.Init();

    // If you have defined a root view controller, set it here:
    initialViewController = Storyboard.InstantiateInitialViewController() as UIViewController;
    Window.RootViewController = initialViewController;
    UITabBar.Appearance.SelectedImageTintColor = UIColor.FromRGB(14, 125, 202);
    UITabBar.Appearance.BackgroundColor = UIColor.White;

    // make the window visible
    Window.MakeKeyAndVisible();

    // Create a new capture session
    Session = new AVCaptureSession();
    Session.SessionPreset = AVCaptureSession.PresetMedium;

    // Create a device input
    CaptureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    if (CaptureDevice == null)
    {
        // No camera (e.g. simulator): launch still succeeds, camera stays unavailable.
        //throw new Exception("Video recording not supported on this device");
    }
    else
    {
        // Prepare device for configuration
        if (!CaptureDevice.LockForConfiguration(out Error))
        {
            // There has been an issue, abort
            Console.WriteLine("Error: {0}", Error.LocalizedDescription);
            CaptureDevice.UnlockForConfiguration();
            return false;
        }

        // Configure stream for 15 frames per second (fps)
        CaptureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);

        // Unlock configuration
        CaptureDevice.UnlockForConfiguration();

        // Get input from capture device
        Input = AVCaptureDeviceInput.FromDevice(CaptureDevice);
        if (Input == null)
        {
            // Error, report and abort
            Console.WriteLine("Unable to gain input from capture device.");
            CameraAvailable = false;
            return false;
        }

        // Attach input to session
        Session.AddInput(Input);

        // Create a new output delivering uncompressed BGRA frames
        var output = new AVCaptureVideoDataOutput();
        var settings = new AVVideoSettingsUncompressed();
        settings.PixelFormatType = CVPixelFormatType.CV32BGRA;
        output.WeakVideoSettings = settings.Dictionary;

        // Configure and attach to the output to the session
        Queue = new DispatchQueue("ManCamQueue");
        Recorder = new OutputRecorder();
        output.SetSampleBufferDelegate(Recorder, Queue);
        Session.AddOutput(output);

        // Configure and attach a still image output for bracketed capture
        StillImageOutput = new AVCaptureStillImageOutput();
        var dict = new NSMutableDictionary();
        dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
        // BUG FIX: the JPEG settings dictionary was built but never applied to the
        // output, so it was dead code; assign it before adding the output.
        StillImageOutput.OutputSettings = dict;
        Session.AddOutput(StillImageOutput);

        // Let tabs know that a camera is available
        CameraAvailable = true;
    }

    return true;
}
// Configures and starts the ZXing scanning session: camera input, preview layer in
// this view, a ZXing BarcodeReader built from `options`, and a BGRA video-data
// output whose frames are decoded by `outputRecorder`. Returns false when no
// camera or input is available (e.g. on the simulator); true once running.
// NOTE: the session is started BEFORE the video-data output is attached; the
// delegate/decoder wiring happens while frames are already flowing, so the exact
// statement order below is load-bearing.
bool SetupCaptureSession ()
{
    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession () {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // create a device input and attach it to the session
    var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
    if (captureDevice == null){
        Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
        return false;
    }
    var input = AVCaptureDeviceInput.FromDevice (captureDevice);
    if (input == null){
        Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
        return false;
    }
    else
        session.AddInput (input);

    previewLayer = new AVCaptureVideoPreviewLayer(session);

    // Preview framerate cap set here: CMTime(1, 10) is 10 fps (the old comment
    // said 15 fps, which did not match the code). Guarded by RespondsToSelector
    // because the `connection` property only exists on newer iOS versions.
    if (previewLayer.RespondsToSelector(new Selector("connection")))
        previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);

    previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = this.Frame;
    previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    // Host the preview layer in a resizable subview of this view.
    layerView = new UIView(this.Frame);
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);

    this.AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    // Keep the caller-supplied overlay above the preview.
    if (overlayView != null)
    {
        this.AddSubview (overlayView);
        this.BringSubviewToFront (overlayView);
        //overlayView.LayoutSubviews ();
    }

    session.StartRunning ();
    Console.WriteLine ("RUNNING!!!");

    // create a VideoDataOutput and add it to the sesion
    output = new AVCaptureVideoDataOutput () {
        //videoSettings
        VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
    };

    // configure the output
    queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

    // Luminance-source factory: rotates the captured frame to match device
    // orientation before decoding (ZXing expects upright barcodes).
    var barcodeReader = new BarcodeReader(null, (img) =>
    {
        var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height);

        //Don't try and rotate properly if we're autorotating anyway
        if (options.AutoRotate.HasValue && options.AutoRotate.Value)
            return src;

        switch (UIDevice.CurrentDevice.Orientation)
        {
            case UIDeviceOrientation.Portrait:
                return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise();
            // NOTE(review): upside-down portrait uses the same 270° rotation as
            // portrait — looks like it should differ by 180°; confirm intended.
            case UIDeviceOrientation.PortraitUpsideDown:
                return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise();
            case UIDeviceOrientation.LandscapeLeft:
                return src;
            case UIDeviceOrientation.LandscapeRight:
                return src;
        }

        return src;
    }, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

    // Copy the user's scanning options onto the reader.
    if (this.options.TryHarder.HasValue)
    {
        Console.WriteLine("TRY_HARDER: " + this.options.TryHarder.Value);
        barcodeReader.Options.TryHarder = this.options.TryHarder.Value;
    }
    if (this.options.PureBarcode.HasValue)
        barcodeReader.Options.PureBarcode = this.options.PureBarcode.Value;
    if (this.options.AutoRotate.HasValue)
    {
        Console.WriteLine("AUTO_ROTATE: " + this.options.AutoRotate.Value);
        barcodeReader.AutoRotate = this.options.AutoRotate.Value;
    }
    if (!string.IsNullOrEmpty (this.options.CharacterSet))
        barcodeReader.Options.CharacterSet = this.options.CharacterSet;
    if (this.options.TryInverted.HasValue)
        barcodeReader.TryInverted = this.options.TryInverted.Value;

    if (this.options.PossibleFormats != null && this.options.PossibleFormats.Count > 0)
    {
        barcodeReader.Options.PossibleFormats = new List<BarcodeFormat>();
        foreach (var pf in this.options.PossibleFormats)
            barcodeReader.Options.PossibleFormats.Add(pf);
    }

    // Per-frame decode callback: successful results are forwarded to resultCallback;
    // decode exceptions are swallowed so one bad frame doesn't kill the stream.
    outputRecorder = new OutputRecorder (this.options, img =>
    {
        try
        {
            var started = DateTime.Now;
            var rs = barcodeReader.Decode(img);
            var total = DateTime.Now - started;
            Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms");

            if (rs != null)
                resultCallback(rs);
        }
        catch (Exception ex)
        {
            Console.WriteLine("DECODE FAILED: " + ex);
        }
    });

    output.AlwaysDiscardsLateVideoFrames = true;
    output.SetSampleBufferDelegate (outputRecorder, queue);

    Console.WriteLine("SetupCamera Finished");

    session.AddOutput (output);
    //session.StartRunning ();

    // Prefer continuous autofocus centered on the frame, when the device supports it.
    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
    {
        NSError err = null;
        if (captureDevice.LockForConfiguration(out err))
        {
            captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;

            if (captureDevice.FocusPointOfInterestSupported)
                captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);

            captureDevice.UnlockForConfiguration();
        }
        else
            Console.WriteLine("Failed to Lock for Config: " + err.Description);
    }

    return true;
}
/// <summary>
/// Configures and starts the scanning session using iOS's built-in
/// AVCaptureMetadataOutput barcode detection (no per-frame ZXing decode):
/// picks a camera per ScanningOptions, selects a supported resolution,
/// installs a metadata delegate that throttles and forwards results, builds
/// the preview layer, and applies autofocus/exposure/white-balance.
/// </summary>
/// <returns>false when no camera or input is available; true once running.</returns>
bool SetupCaptureSession()
{
    var availableResolutions = new List<CameraResolution>();

    // Presets we are willing to use, mapped to their pixel dimensions.
    var consideredResolutions = new Dictionary<NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // Pick front or back camera per the scanning options; last enumerated
    // device wins if no position matches.
    AVCaptureDevice captureDevice = null;
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    foreach (var device in devices)
    {
        captureDevice = device;
        if (ScanningOptions.UseFrontCameraIfAvailable.HasValue &&
            ScanningOptions.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!ScanningOptions.UseFrontCameraIfAvailable.HasValue ||
                  !ScanningOptions.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            AddSubview(overlayView);
            BringSubviewToFront(overlayView);
        }
        return false;
    }

    CameraResolution resolution = null;

    // Go through the resolutions we can even consider and keep those the device supports.
    foreach (var cr in consideredResolutions)
    {
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }

    resolution = ScanningOptions.GetResolution(availableResolutions);

    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width && c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();

        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            session.SessionPreset = preset;
        }
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            AddSubview(overlayView);
            BringSubviewToFront(overlayView);
        }
        return false;
    }
    else
    {
        session.AddInput(input);
    }

    foundResult = false;

    //Detect barcodes with built in avcapture stuff
    var metadataOutput = new AVCaptureMetadataOutput();

    captureDelegate = new CaptureDelegate(metaDataObjects =>
    {
        if (!analyzing)
        {
            return;
        }

        // Throttle: skip frames that arrive faster than the configured delays,
        // and never overlap with an in-flight callback (`working`).
        var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;
        if (msSinceLastPreview < ScanningOptions.DelayBetweenAnalyzingFrames ||
            (wasScanned && msSinceLastPreview < ScanningOptions.DelayBetweenContinuousScans) ||
            working)
        {
            return;
        }

        try
        {
            working = true;
            wasScanned = false;
            lastAnalysis = DateTime.UtcNow;

            var mdo = metaDataObjects.FirstOrDefault();
            if (!(mdo is AVMetadataMachineReadableCodeObject readableObj))
            {
                return;
            }

            // Ignore non-barcode detections (faces, bodies, salient objects).
            if (readableObj.Type == AVMetadataObjectType.CatBody ||
                readableObj.Type == AVMetadataObjectType.DogBody ||
                readableObj.Type == AVMetadataObjectType.Face ||
                readableObj.Type == AVMetadataObjectType.HumanBody ||
                readableObj.Type == AVMetadataObjectType.SalientObject)
            {
                return;
            }

            wasScanned = true;

            var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());
            var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);
            resultCallback(rs);
        }
        finally
        {
            working = false;
        }
    });

    metadataOutput.SetDelegate(captureDelegate, DispatchQueue.MainQueue);
    session.AddOutput(metadataOutput);

    //Setup barcode formats
    if (ScanningOptions?.PossibleFormats?.Any() ?? false)
    {
        // OR the requested ZXing formats together, then strip the None placeholder.
        var formats = AVMetadataObjectType.None;
        foreach (var f in ScanningOptions.PossibleFormats)
        {
            formats |= AVCaptureBarcodeFormatFromZXingBarcodeFormat(f);
        }
        formats &= ~AVMetadataObjectType.None;
        metadataOutput.MetadataObjectTypes = formats;
    }
    else
    {
        // No explicit formats requested: accept everything the output can detect.
        // (Removed an unused local that captured AvailableMetadataObjectTypes.)
        metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
    }

    previewLayer = new AVCaptureVideoPreviewLayer(session);
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
    previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

    layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);

    AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    // Keep the caller-supplied overlay above the preview.
    if (overlayView != null)
    {
        AddSubview(overlayView);
        BringSubviewToFront(overlayView);
    }

    session.StartRunning();

    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
    {
        NSError err = null;
        if (captureDevice.LockForConfiguration(out err))
        {
            // Prefer continuous modes, fall back to one-shot auto modes where supported.
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
            }

            if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
            }
            else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
            {
                captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
            }

            if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
            }
            else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
            {
                captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
            }

            // Near-range focus restriction (barcodes are held close); iOS 7+.
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
            {
                captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            }

            // Focus/expose on the center of the frame.
            if (captureDevice.FocusPointOfInterestSupported)
            {
                captureDevice.FocusPointOfInterest = new CGPoint(0.5f, 0.5f);
            }

            if (captureDevice.ExposurePointOfInterestSupported)
            {
                captureDevice.ExposurePointOfInterest = new CGPoint(0.5f, 0.5f);
            }

            captureDevice.UnlockForConfiguration();
        }
        else
        {
            Console.WriteLine("Failed to Lock for Config: " + err.Description);
        }
    }

    return true;
}
/// <summary>
/// Lazily initializes the capture session, asset writer and writer input,
/// connects them, and starts the session. Any missing piece or thrown
/// exception is reported via <c>Failure.Alert</c> and aborts the start.
/// </summary>
/// <returns><c>true</c> when recording started; <c>false</c> on any failure.</returns>
public bool StartRecording()
{
    try
    {
        // Each Maybe* helper returns null when its piece cannot be created;
        // guard every step and surface a specific message for it.
        session = MaybeInitializeSession();
        if (session == null)
        {
            Failure.Alert("Couldn't initialize session");
            return false;
        }

        writer = MaybeInitializeAssetWriter();
        if (writer == null)
        {
            Failure.Alert("Couldn't initialize writer");
            return false;
        }

        inputWriter = MaybeInitializeInputWriter();
        if (inputWriter == null)
        {
            Failure.Alert("Couldn't initialize input writer");
            return false;
        }

        if (!writer.CanAddInput(inputWriter))
        {
            Failure.Alert("Couldn't add input writer to writer");
            return false;
        }

        // Everything is in place: wire the input into the writer and go live.
        writer.AddInput(inputWriter);
        session.StartRunning();
        return true;
    }
    catch (Exception caught)
    {
        Failure.Alert(caught.Message);
        return false;
    }
}
/// <summary>
/// Configures and starts the ZXing scanning session: picks a camera per
/// ScanningOptions (stored in the <c>captureDevice</c> field), selects a
/// supported resolution, builds the preview layer and overlay, attaches a
/// BGRA video-data output decoded by <c>outputRecorder</c>, and applies
/// focus/exposure/white-balance settings after snapshotting the device's
/// original configuration into <c>captureDeviceOriginalConfig</c> so it can
/// be restored later.
/// </summary>
/// <returns>false when no camera or input is available; true once running.</returns>
bool SetupCaptureSession()
{
    var availableResolutions = new List<CameraResolution>();

    // Presets we are willing to use, mapped to their pixel dimensions.
    var consideredResolutions = new Dictionary<NSString, CameraResolution> {
        { AVCaptureSession.Preset352x288, new CameraResolution { Width = 352, Height = 288 } },
        { AVCaptureSession.PresetMedium, new CameraResolution { Width = 480, Height = 360 } }, //480x360
        { AVCaptureSession.Preset640x480, new CameraResolution { Width = 640, Height = 480 } },
        { AVCaptureSession.Preset1280x720, new CameraResolution { Width = 1280, Height = 720 } },
        { AVCaptureSession.Preset1920x1080, new CameraResolution { Width = 1920, Height = 1080 } }
    };

    // configure the capture session for low resolution, change this if your code
    // can cope with more data or volume
    session = new AVCaptureSession()
    {
        SessionPreset = AVCaptureSession.Preset640x480
    };

    // Pick front or back camera per the scanning options; last enumerated
    // device wins if no position matches. (Removed an unused DateTime local.)
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    foreach (var device in devices)
    {
        captureDevice = device;
        if (ScanningOptions.UseFrontCameraIfAvailable.HasValue &&
            ScanningOptions.UseFrontCameraIfAvailable.Value &&
            device.Position == AVCaptureDevicePosition.Front)
        {
            break; //Front camera successfully set
        }
        else if (device.Position == AVCaptureDevicePosition.Back &&
                 (!ScanningOptions.UseFrontCameraIfAvailable.HasValue ||
                  !ScanningOptions.UseFrontCameraIfAvailable.Value))
        {
            break; //Back camera succesfully set
        }
    }

    if (captureDevice == null)
    {
        Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            AddSubview(overlayView);
            BringSubviewToFront(overlayView);
        }
        return false;
    }

    CameraResolution resolution = null;

    // Go through the resolutions we can even consider and keep those the device supports.
    foreach (var cr in consideredResolutions)
    {
        if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
        {
            availableResolutions.Add(cr.Value);
        }
    }

    resolution = ScanningOptions.GetResolution(availableResolutions);

    // See if the user selected a resolution
    if (resolution != null)
    {
        // Now get the preset string from the resolution chosen
        var preset = (from c in consideredResolutions
                      where c.Value.Width == resolution.Width && c.Value.Height == resolution.Height
                      select c.Key).FirstOrDefault();

        // If we found a matching preset, let's set it on the session
        if (!string.IsNullOrEmpty(preset))
        {
            session.SessionPreset = preset;
        }
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        Console.WriteLine("No input - this won't work on the simulator, try a physical device");
        if (overlayView != null)
        {
            AddSubview(overlayView);
            BringSubviewToFront(overlayView);
        }
        return false;
    }
    else
    {
        session.AddInput(input);
    }

    var startedAVPreviewLayerAlloc = PerformanceCounter.Start();
    previewLayer = new AVCaptureVideoPreviewLayer(session);
    PerformanceCounter.Stop(startedAVPreviewLayerAlloc, "Alloc AVCaptureVideoPreviewLayer took {0} ms.");

    var perf2 = PerformanceCounter.Start();
    previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
    previewLayer.Frame = new CGRect(0, 0, Frame.Width, Frame.Height);
    previewLayer.Position = new CGPoint(Layer.Bounds.Width / 2, (Layer.Bounds.Height / 2));

    layerView = new UIView(new CGRect(0, 0, Frame.Width, Frame.Height));
    layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
    layerView.Layer.AddSublayer(previewLayer);

    AddSubview(layerView);

    ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

    // Keep the caller-supplied overlay above the preview.
    if (overlayView != null)
    {
        AddSubview(overlayView);
        BringSubviewToFront(overlayView);
    }
    PerformanceCounter.Stop(perf2, "PERF: Setting up layers took {0} ms");

    var perf3 = PerformanceCounter.Start();
    session.StartRunning();
    PerformanceCounter.Stop(perf3, "PERF: session.StartRunning() took {0} ms");

    var perf4 = PerformanceCounter.Start();

    // create a VideoDataOutput and add it to the sesion (BGRA frames for ZXing)
    var videoSettings = NSDictionary.FromObjectAndKey(new NSNumber((int)CVPixelFormatType.CV32BGRA), CVPixelBuffer.PixelFormatTypeKey);
    output = new AVCaptureVideoDataOutput
    {
        WeakVideoSettings = videoSettings
    };

    // configure the output
    queue = new DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

    var barcodeReader = ScanningOptions.BuildBarcodeReader();

    // Per-frame decode callback: returns true when a barcode was found (and
    // forwarded); decode exceptions are swallowed so one bad frame doesn't
    // kill the stream.
    outputRecorder = new OutputRecorder(this, img =>
    {
        var ls = img;

        if (!IsAnalyzing)
        {
            return false;
        }

        try
        {
            var perfDecode = PerformanceCounter.Start();

            if (shouldRotatePreviewBuffer)
            {
                ls = ls.rotateCounterClockwise();
            }

            var result = barcodeReader.Decode(ls);
            PerformanceCounter.Stop(perfDecode, "Decode Time: {0} ms");

            if (result != null)
            {
                resultCallback(result);
                return true;
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine("DECODE FAILED: " + ex);
        }

        return false;
    });

    output.AlwaysDiscardsLateVideoFrames = true;
    output.SetSampleBufferDelegate(outputRecorder, queue);

    PerformanceCounter.Stop(perf4, "PERF: SetupCamera Finished. Took {0} ms.");
    session.AddOutput(output);

    var perf5 = PerformanceCounter.Start();
    if (captureDevice.LockForConfiguration(out var err))
    {
        // Snapshot the device's current configuration so it can be restored later.
        captureDeviceOriginalConfig = new AVConfigs
        {
            FocusMode = captureDevice.FocusMode,
            ExposureMode = captureDevice.ExposureMode,
            WhiteBalanceMode = captureDevice.WhiteBalanceMode,
            AutoFocusRangeRestriction = captureDevice.AutoFocusRangeRestriction,
        };

        if (captureDevice.HasFlash)
        {
            captureDeviceOriginalConfig.FlashMode = captureDevice.FlashMode;
        }
        if (captureDevice.HasTorch)
        {
            captureDeviceOriginalConfig.TorchMode = captureDevice.TorchMode;
        }
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDeviceOriginalConfig.FocusPointOfInterest = captureDevice.FocusPointOfInterest;
        }
        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDeviceOriginalConfig.ExposurePointOfInterest = captureDevice.ExposurePointOfInterest;
        }

        if (ScanningOptions.DisableAutofocus)
        {
            captureDevice.FocusMode = AVCaptureFocusMode.Locked;
        }
        else
        {
            // Prefer continuous autofocus, fall back to one-shot autofocus.
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
            }
            else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
            {
                captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
            }
        }

        if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
        }
        else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
        {
            captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
        }

        if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
        }
        else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
        {
            captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
        }

        // Near-range focus restriction (barcodes are held close); iOS 7+.
        if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
        {
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
        }

        // Focus/expose on the center of the frame.
        if (captureDevice.FocusPointOfInterestSupported)
        {
            captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
        }

        if (captureDevice.ExposurePointOfInterestSupported)
        {
            captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
        }

        captureDevice.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine("Failed to Lock for Config: " + err.Description);
    }
    PerformanceCounter.Stop(perf5, "PERF: Setup Focus in {0} ms.");

    return true;
}
/// <summary>
/// Configures the AVCam-style capture pipeline: video + audio inputs, movie file
/// output and still image output. UI controls stay disabled until the session is
/// confirmed running. All session mutation happens on <c>SessionQueue</c> because
/// AVCaptureSession.StartRunning is a blocking call and AVCaptureSession is not
/// safe to mutate from multiple threads.
/// </summary>
public async override void ViewDidLoad()
{
    base.ViewDidLoad();

    // Disable UI. The UI is enabled if and only if the session starts running.
    CameraButton.Enabled = false;
    RecordButton.Enabled = false;
    StillButton.Enabled = false;

    // Create the AVCaptureSession and hand it to the preview view.
    Session = new AVCaptureSession();
    PreviewView.Session = Session;

    // Communicate with the session and other session objects on this queue.
    SessionQueue = new DispatchQueue("session queue");
    SetupResult = AVCamSetupResult.Success;

    // Check video authorization status. Video access is required; audio access is
    // optional (if denied, movies are simply recorded without sound).
    switch (AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video))
    {
        case AVAuthorizationStatus.Authorized:
            // The user has previously granted access to the camera.
            break;

        case AVAuthorizationStatus.NotDetermined:
            // Suspend the session queue to delay session setup until the access
            // request has completed. This avoids asking the user for audio access
            // if video access is denied. Audio access is implicitly requested when
            // the audio AVCaptureDeviceInput is created during session setup.
            SessionQueue.Suspend();
            var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
            if (!granted)
                SetupResult = AVCamSetupResult.CameraNotAuthorized;
            SessionQueue.Resume();
            break;

        default:
            // The user has previously denied access.
            SetupResult = AVCamSetupResult.CameraNotAuthorized;
            break;
    }

    // Dispatch session setup to SessionQueue so the main queue isn't blocked,
    // which keeps the UI responsive.
    SessionQueue.DispatchAsync(() =>
    {
        if (SetupResult != AVCamSetupResult.Success)
            return;

        backgroundRecordingID = -1;
        NSError error;

        AVCaptureDevice videoDevice = CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);
        AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);
        if (videoDeviceInput == null)
        {
            // FIX: the original only logged here and then passed the null input to
            // Session.CanAddInput, which throws. Video is mandatory, so fail setup.
            Console.WriteLine("Could not create video device input: {0}", error);
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
            return;
        }

        Session.BeginConfiguration();

        if (Session.CanAddInput(videoDeviceInput))
        {
            Session.AddInput(VideoDeviceInput = videoDeviceInput);

            DispatchQueue.MainQueue.DispatchAsync(() =>
            {
                // Dispatched to the main queue because AVCaptureVideoPreviewLayer is
                // the backing layer for PreviewView and UIView can only be manipulated
                // on the main thread. (Exception: video orientation changes on the
                // preview layer's connection need not be serialized with other session
                // manipulation.)
                //
                // Use the status bar orientation as the initial video orientation;
                // subsequent orientation changes are handled by ViewWillTransitionToSize.
                UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
                AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                    initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;

                var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
                previewLayer.Connection.VideoOrientation = initialVideoOrientation;
            });
        }
        else
        {
            Console.WriteLine("Could not add video device input to the session");
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
        }

        // Audio is optional: log and continue without sound if it is unavailable.
        // FIX: guard against a null audio input before calling CanAddInput.
        AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
        AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
        if (audioDeviceInput == null)
            Console.WriteLine("Could not create audio device input: {0}", error);
        else if (Session.CanAddInput(audioDeviceInput))
            Session.AddInput(audioDeviceInput);
        else
            Console.WriteLine("Could not add audio device input to the session");

        var movieFileOutput = new AVCaptureMovieFileOutput();
        if (Session.CanAddOutput(movieFileOutput))
        {
            Session.AddOutput(MovieFileOutput = movieFileOutput);
            AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
            if (connection.SupportsVideoStabilization)
                connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
        }
        else
        {
            Console.WriteLine("Could not add movie file output to the session");
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
        }

        var stillImageOutput = new AVCaptureStillImageOutput();
        if (Session.CanAddOutput(stillImageOutput))
        {
            stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed
            {
                Codec = AVVideoCodec.JPEG
            };
            Session.AddOutput(StillImageOutput = stillImageOutput);
        }
        else
        {
            Console.WriteLine("Could not add still image output to the session");
            SetupResult = AVCamSetupResult.SessionConfigurationFailed;
        }

        Session.CommitConfiguration();
    });
}
bool SetupCaptureSession() { session = new AVCaptureSession(); AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video); if (device == null) { Console.WriteLine("No video camera (in simulator?)"); return false; // simulator? } NSError error = null; AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error); if (input == null) Console.WriteLine("Error: " + error); else session.AddInput(input); AVCaptureMetadataOutput output = new AVCaptureMetadataOutput(); var dg = new CaptureDelegate(this); output.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue); session.AddOutput(output); // This could be any list of supported barcode types output.MetadataObjectTypes = new NSString[] {AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode}; // OR you could just accept "all" with the following line; // output.MetadataObjectTypes = output.AvailableMetadataObjectTypes; // empty // DEBUG: use this if you're curious about the available types // foreach (var t in output.AvailableMetadataObjectTypes) // Console.WriteLine(t); AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session); //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height); previewLayer.Frame = new RectangleF(0, 0, 320, 290); previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString(); View.Layer.AddSublayer (previewLayer); session.StartRunning(); Console.WriteLine("StartRunning"); return true; }
public CameraService() { captureSession = new AVCaptureSession(); }
/// <summary>
/// Builds the live camera preview: creates the session, sizes a square preview
/// rectangle from the view's shorter side, attaches the preview layer, wires up
/// the default video device, adds a JPEG still-image output, and starts running.
/// Exits via ExitWithoutPhoto when no camera input can be created (e.g. simulator).
/// </summary>
public void SetupLiveCameraStream()
{
    Debug.WriteLine("SetupLiveCameraStream start");
    captureSession = new AVCaptureSession();

    if (liveCameraStream == null)
    {
        Debug.WriteLine("SetupLiveCameraStream liveCameraStream was null");
        liveCameraStream = new UIView();
    }
    var viewLayer = liveCameraStream.Layer;

    // Clamp to a square using the shorter of width/height.
    nfloat w = this.View.Frame.Width;
    nfloat h = this.View.Frame.Height;
    Debug.WriteLine(" pre w:" + w + ", h:" + h);
    if (w < h)
    {
        h = w;
    }
    else if (h < w)
    {
        w = h;
    }
    Debug.WriteLine("post w:" + w + ", h:" + h);

    CoreGraphics.CGRect myRect = new CoreGraphics.CGRect(0f, 100f, w, h);
    //CoreGraphics.CGRect myRect = new CGRect(new CGSize(w, w));

    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        //Frame = this.View.Frame
        // This does correctly reduce the longer side.
        // However, it then reduces the shorter side to maintain aspect ratio. oof.
        Frame = myRect,
        //VideoGravity = AVLayerVideoGravity.Resize, // default is ResizeAspect which results in a new rectangle
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill, // default is ResizeAspect
    };
    //videoPreviewLayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
    liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

    //UITapGestureRecognizer tapRecognizer = new UITapGestureRecognizer(PreviewAreaTappedToChangeFocus);
    //liveCameraStream.AddGestureRecognizer(tapRecognizer);

    //var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
    captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    ConfigureCameraForDevice(captureDevice);

    captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (captureDeviceInput == null)
    {
        //NSError err;
        //captureDeviceInput = new AVCaptureDeviceInput(captureDevice, out err);
        ExitWithoutPhoto(this, new EventArgs());
        return;
    }
    captureSession.AddInput(captureDeviceInput);

    // Request JPEG-encoded stills from the still image output.
    var dictionary = new NSMutableDictionary();
    dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);

    stillImageOutput = new AVCaptureStillImageOutput()
    {
        // FIX: the original assigned an empty `new NSDictionary()` here, so the
        // JPEG codec dictionary built above was never applied.
        OutputSettings = dictionary,
        HighResolutionStillImageOutputEnabled = true,
    };
    captureSession.AddOutput(stillImageOutput);

    Debug.WriteLine("SetupLiveCameraStream pre running");
    captureSession.StartRunning();
    Debug.WriteLine("SetupLiveCameraStream end");
}
/// <summary>
/// Sets up a video + audio recording session with an on-screen preview layer
/// behind the existing UI controls, configures the movie file output limits,
/// and starts the session. Input setup fails (label shown) on the simulator,
/// which has no capture devices.
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    weAreRecording = false;
    lblError.Hidden = true;
    btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

    //Set up session
    session = new AVCaptureSession();

    //Set up inputs and add them to the session
    //this will only work if using a physical device!
    Console.WriteLine("getting device inputs");
    try
    {
        //add video capture device
        device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
        input = AVCaptureDeviceInput.FromDevice(device);
        session.AddInput(input);

        //add audio capture device
        audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
        audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
        session.AddInput(audioInput);
    }
    catch (Exception ex)
    {
        //show the label error. This will always show when running in simulator instead of physical device.
        // FIX: the caught exception was previously discarded; log it for diagnostics.
        Console.WriteLine("Failed to set up capture inputs: " + ex);
        lblError.Hidden = false;
        return;
    }

    //Set up preview layer (shows what the input device sees)
    Console.WriteLine("setting up preview layer");
    previewlayer = new AVCaptureVideoPreviewLayer(session);
    previewlayer.Frame = this.View.Bounds;

    //this code makes UI controls sit on top of the preview layer!
    //Allows you to just place the controls in interface builder.
    // FIX: the original allocated cameraView twice in a row; the first instance was dead.
    UIView cameraView = new UIView();
    cameraView.Layer.AddSublayer(previewlayer);
    this.View.AddSubview(cameraView);
    this.View.SendSubviewToBack(cameraView);

    Console.WriteLine("Configuring output");
    output = new AVCaptureMovieFileOutput();
    // NOTE(review): CMTime(10000, 30) is 10000/30 ≈ 333 seconds, not 10000 seconds
    // as the variable name suggests — values kept as-is, verify the intended cap.
    long totalSeconds = 10000;
    Int32 preferredTimeScale = 30;
    CMTime maxDuration = new CMTime(totalSeconds, preferredTimeScale);
    output.MinFreeDiskSpaceLimit = 1024 * 1024;
    output.MaxRecordedDuration = maxDuration;

    if (session.CanAddOutput(output))
    {
        session.AddOutput(output);
    }
    session.SessionPreset = AVCaptureSession.PresetMedium;

    Console.WriteLine("About to start running session");
    session.StartRunning();

    //toggle recording button was pushed.
    btnStartRecording.TouchUpInside += startStopPushed;
    //Console.ReadLine ();
}
public override void ViewWillDisappear(bool animated) { base.ViewWillDisappear(animated); TabBarController.TabBar.Hidden = false; if (CaptureSession != null) { CaptureSession.StopRunning(); CaptureSession.Dispose(); CaptureSession = null; } if (enterBackgroundToken != null) { NSNotificationCenter.DefaultCenter.RemoveObserver(enterBackgroundToken); } UIApplication.SharedApplication.SetStatusBarHidden(false, UIStatusBarAnimation.None); }
/// <summary> /// Stops the and tears down the capture session. /// </summary> public void StopAndTearDownCaptureSession () { captureSession.StopRunning (); if (captureSession != null) NSNotificationCenter.DefaultCenter.RemoveObserver (this, AVCaptureSession.DidStopRunningNotification, captureSession); captureSession.Dispose (); captureSession = null; if (previewBufferQueue != null){ previewBufferQueue.Dispose (); previewBufferQueue = null; } if (movieWritingQueue != null){ movieWritingQueue.Dispose (); movieWritingQueue = null; } }
/// <summary>
/// Sets up a frame-grabbing pipeline: camera input, a BGRA video-data output
/// feeding VideoFrameSamplerDelegate on a GCD queue, and a full-screen preview
/// layer, then starts the session and pins the capture framerate to DETECTION_FPS.
/// Throws InvalidProgramException when no camera/input is available (simulator).
/// </summary>
public override void ViewDidLoad()
{
    base.ViewDidLoad();
    this.View.BackgroundColor = UIColor.White;
    NSError error;

    // Create the session. The AVCaptureSession is the managing instance of the whole video handling.
    var captureSession = new AVCaptureSession()
    {
        // Defines what quality we want to use for the images we grab. Photo gives highest resolutions.
        SessionPreset = AVCaptureSession.PresetPhoto
    };

    // Find a suitable AVCaptureDevice for video input.
    var device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
    if (device == null)
    {
        // This will not work on the iOS Simulator - there is no camera. :-)
        throw new InvalidProgramException("Failed to get AVCaptureDevice for video input!");
    }

    // Create a device input with the device and add it to the session.
    var videoInput = AVCaptureDeviceInput.FromDevice(device, out error);
    if (videoInput == null)
    {
        throw new InvalidProgramException("Failed to get AVCaptureDeviceInput from AVCaptureDevice!");
    }

    // Let session read from the input, this is our source.
    captureSession.AddInput(videoInput);

    // Create output for the video stream. This is the destination.
    var videoOutput = new AVCaptureVideoDataOutput()
    {
        AlwaysDiscardsLateVideoFrames = true
    };

    // Define the video format we want to use. Note that Xamarin exposes the
    // CompressedVideoSetting and UncompressedVideoSetting properties on
    // AVCaptureVideoDataOutput in the Unified API, but I could not get these to work.
    // The VideoSettings property is deprecated, so I use WeakVideoSettings instead,
    // which takes an NSDictionary as input.
    this.videoSettingsDict = new NSMutableDictionary();
    this.videoSettingsDict.Add(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA));
    videoOutput.WeakVideoSettings = this.videoSettingsDict;

    // Create a delegate to report back to us when an image has been captured.
    // We want to grab the camera stream and feed it through an
    // AVCaptureVideoDataOutputSampleBufferDelegate, which notifies us when a new
    // image is available. VideoFrameSamplerDelegate in this project implements it.
    this.sampleBufferDelegate = new VideoFrameSamplerDelegate();

    // Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue.
    this.sessionQueue = new DispatchQueue("AVSessionQueue");

    // Assign the queue and the delegate to the output. Now all output will go through the delegate.
    videoOutput.SetSampleBufferDelegateQueue(this.sampleBufferDelegate, this.sessionQueue);

    // Add output to session.
    captureSession.AddOutput(videoOutput);

    // We also want to visualize the input stream. The raw stream can be fed into an
    // AVCaptureVideoPreviewLayer (a CALayer subclass), added to the controller's main view.
    var layer = this.View.Layer;
    this.videoLayer = AVCaptureVideoPreviewLayer.FromSession(captureSession);
    this.videoLayer.Frame = layer.Bounds;
    layer.AddSublayer(this.videoLayer);

    // All setup! Start capturing!
    captureSession.StartRunning();

    // Configure framerate. Kind of weird way of doing it but the only one that works.
    // FIX: the original ignored LockForConfiguration's boolean result and set the
    // frame durations even when the lock failed; check it and report failure.
    if (device.LockForConfiguration(out error))
    {
        // CMTime constructor means: 1 = one second, DETECTION_FPS = how many samples
        // per unit, which is 1 second in this case.
        device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS);
        device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS);
        device.UnlockForConfiguration();
    }
    else
    {
        Console.WriteLine("Could not lock device for framerate configuration: {0}", error);
    }
}