Example #1
        void SetupPhotoCapture()
        {
            captureSession.SessionPreset = AVCaptureSession.PresetPhoto;

            // Add photo output.
            photoOutput = new AVCapturePhotoOutput();
            photoOutput.IsHighResolutionCaptureEnabled = true;

            if (captureSession.CanAddOutput(photoOutput))
            {
                captureSession.AddOutput(photoOutput);
            }
        }
        private void InitSession()
        {
            try
            {
                //init capture session
                _AVSession = new AVCaptureSession();

                //check permissions
                var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
                if (authorizationStatus != AVAuthorizationStatus.Authorized)
                {
                    return;
                }

                //check capture camera
                var cameras = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
                var camera  = cameras.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
                if (camera == null)
                {
                    return;
                }

                //add input to capture session
                _AVDeviceImput = new AVCaptureDeviceInput(camera, out NSError _);
                if (_AVSession.CanAddInput(_AVDeviceImput))
                {
                    _AVSession.AddInput(_AVDeviceImput);
                }
                else
                {
                    return;
                }

                //add output to camera session
                _MetadataObjectsQueue = new DispatchQueue("metadata objects queue");
                _AVMetadataOutput     = new AVCaptureMetadataOutput();
                if (_AVSession.CanAddOutput(_AVMetadataOutput))
                {
                    _AVSession.AddOutput(_AVMetadataOutput);
                }
                else
                {
                    return;
                }
                _AVMetadataOutput.SetDelegate(this, _MetadataObjectsQueue);

                //init the video preview layer and add it to the current view
                _AVVideoPeviewLayer = new AVCaptureVideoPreviewLayer(_AVSession);
                _AVVideoPeviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                _AVVideoPeviewLayer.Frame        = Bounds;
                this.Layer.AddSublayer(_AVVideoPeviewLayer);

                //start capture session
                StartSession(true);
            }
            catch (Exception ex)
            {
                Console.WriteLine("IOS_SCAN | init error", ex);
            }
        }
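The InitSession listing above returns early when camera access has not yet been granted, and it never assigns MetadataObjectTypes, so the metadata delegate would receive nothing. A minimal sketch of a call site that requests permission first and then picks a barcode type; the wiring around InitSession and the QRCode choice are assumptions, not part of the original snippet:

        // Sketch: ask for camera access, then build the session on the main queue.
        AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, granted =>
        {
            if (!granted)
            {
                return;
            }

            DispatchQueue.MainQueue.DispatchAsync(() =>
            {
                InitSession();

                // The output reports nothing until the desired types are selected (QRCode is just an example).
                _AVMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;
            });
        });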
Example #3
        public SessionSetupResult ConfigureSession(AVCaptureSession session)
        {
            var inputDeviceConfigureResult = _videoDeviceInputManager.ConfigureVideoDeviceInput(session);

            if (inputDeviceConfigureResult != SessionSetupResult.Success)
            {
                return(inputDeviceConfigureResult);
            }

            // Add movie file output.
            Console.WriteLine("capture session: configuring - adding movie file input");

            var movieFileOutput = new AVCaptureMovieFileOutput();

            if (session.CanAddOutput(movieFileOutput))
            {
                session.AddOutput(movieFileOutput);
                _videoFileOutput = movieFileOutput;

                DispatchQueue.MainQueue.DispatchAsync(() =>
                {
                    _videoRecordingDelegate?.DidBecomeReadyForVideoRecording(this);
                });
            }
            else
            {
                Console.WriteLine("capture session: could not add video output to the session");
                return(SessionSetupResult.ConfigurationFailed);
            }

            _audioCaptureSession = new AudioCaptureSession();
            _audioCaptureSession.ConfigureSession(session);

            return(SessionSetupResult.Success);
        }
        private void InitialiseCaptureSession()
        {
            try
            {
                _captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                var     captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video) as AVCaptureDevice;
                NSError error;
                var     input = new AVCaptureDeviceInput(captureDevice, out error);
                if (error != null)
                {
                    Console.WriteLine($"Error {error}");
                }

                if (_captureSession.CanAddInput(input))
                {
                    _captureSession.AddInput(input);
                }

                var videoOutput = new AVCaptureVideoDataOutput();
                videoOutput.SetSampleBufferDelegateQueue(this, new DispatchQueue("sample buffer delegate"));

                if (_captureSession.CanAddOutput(videoOutput))
                {
                    _captureSession.AddOutput(videoOutput);
                }

                _captureSession.StartRunning();
            }
            catch (Exception ex)
            {
                Console.WriteLine($"InitialiseCaptureSession error: {ex}");
            }
        }
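InitialiseCaptureSession passes this to SetSampleBufferDelegateQueue, so the surrounding class has to act as the sample-buffer delegate. A minimal sketch of the callback it would need, assuming the class implements IAVCaptureVideoDataOutputSampleBufferDelegate; the per-frame work is left as a placeholder:

        // Sketch: invoked on the "sample buffer delegate" queue for every captured frame.
        [Export("captureOutput:didOutputSampleBuffer:fromConnection:")]
        public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // Dispose the buffer promptly or the capture pipeline will stall.
            using (sampleBuffer)
            using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
            {
                // ... per-frame processing goes here ...
            }
        }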
Example #5
        /// <summary>
        /// Start camera preview
        /// </summary>
        public override void StartCamera()
        {
            if (Session == null)
            {
                Session = new AVCaptureSession();

                Device = Configuration.ShowBackCameraFirst
                    ? AVCaptureDevice.Devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back)
                    : AVCaptureDevice.Devices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);

                if (Device == null)
                {
                    NoCameraAvailable();
                    Console.WriteLine("Could not find capture device, does your device have a camera?");
                    return;
                }

                try
                {
                    NSError error;
                    VideoInput = new AVCaptureDeviceInput(Device, out error);

                    Session.AddInput(VideoInput);

                    _videoOutput = new AVCaptureMovieFileOutput {
                        MinFreeDiskSpaceLimit = 1024 * 1024
                    };

                    if (Session.CanAddOutput(_videoOutput))
                    {
                        Session.AddOutput(_videoOutput);
                    }

                    if (Configuration.RecordAudio)
                    {
                        var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

                        _audioInput = new AVCaptureDeviceInput(audioDevice, out error);
                        if (Session.CanAddInput(_audioInput))
                        {
                            Session.AddInput(_audioInput);
                        }
                    }

                    if (Configuration.DetectFaces)
                    {
                        SetupFaceDetection();
                    }

                    SetupVideoPreviewLayer();

                    Session.StartRunning();
                }
                catch { /* ignore */ }

                FlashConfiguration(true);
            }

            base.StartCamera();
        }
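StartRunning is a blocking call, so starting the session on the UI thread, as StartCamera does above, can stall the interface briefly. A hedged variant that moves just that call to a background queue:

                    // Sketch: start the already configured session off the main thread.
                    DispatchQueue.DefaultGlobalQueue.DispatchAsync(() => Session.StartRunning());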
Example #6
        public void MetadataObjectTypesTest()
        {
            TestRuntime.AssertSystemVersion(PlatformName.iOS, 8, 0, throwIfOtherPlatform: false);

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                switch (val)
                                {
                                case AVMetadataObjectType.CatBody:
                                case AVMetadataObjectType.DogBody:
                                case AVMetadataObjectType.HumanBody:
                                case AVMetadataObjectType.SalientObject:
                                    // fail *and crash* on iOS 8 (at least on 32bits devices)
                                    if (!TestRuntime.CheckXcodeVersion(11, 0))
                                    {
                                        continue;
                                    }
                                    // xcode 12 beta 1 on device
                                    if ((Runtime.Arch == Arch.DEVICE) && TestRuntime.CheckXcodeVersion(12, 0))
                                    {
                                        continue;
                                    }
                                    break;
                                }
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #7
        void ConfigureSession()
        {
            if (setupResult != SessionSetupResult.Success)
            {
                return;
            }

            session.BeginConfiguration();

            var     videoDevice = DeviceWithMediaType(AVMediaType.Video, AVCaptureDevicePosition.Back);
            NSError err;
            var     vDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out err);

            if (err != null)
            {
                Console.WriteLine($"Could not create video device input: ${err}");
                setupResult = SessionSetupResult.ConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            if (session.CanAddInput(vDeviceInput))
            {
                session.AddInput(vDeviceInput);
                videoDeviceInput = vDeviceInput;
            }
            else
            {
                Console.WriteLine("Could not add video device input to the session");
                setupResult = SessionSetupResult.ConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            // Add metadata output.
            if (session.CanAddOutput(metadataOutput))
            {
                session.AddOutput(metadataOutput);

                // Set this view controller as the delegate for metadata objects.
                metadataOutput.SetDelegate(this, metadataObjectsQueue);
                metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;                 // Use all metadata object types by default.
                metadataOutput.RectOfInterest      = CGRect.Empty;
            }
            else
            {
                Console.WriteLine("Could not add metadata output to the session");
                setupResult = SessionSetupResult.ConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            session.CommitConfiguration();
        }
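ConfigureSession registers the view controller as the metadata delegate via SetDelegate(this, metadataObjectsQueue). A rough sketch of the callback shape, shown here as a stand-alone AVCaptureMetadataOutputObjectsDelegate subclass similar to the MetadataObjectsDelegate used in later listings; the class name and the logging are placeholders:

        // Sketch: receives detected metadata objects on metadataObjectsQueue.
        class ScannedCodeDelegate : AVCaptureMetadataOutputObjectsDelegate
        {
            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput, AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                foreach (var code in metadataObjects.OfType<AVMetadataMachineReadableCodeObject>())
                {
                    Console.WriteLine($"Scanned {code.Type}: {code.StringValue}");
                }
            }
        }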
Example #8
        public void ConfigureDeviceAndStart()
        {
            var device = GetDevice();

            if (device == null)
            {
                return;
            }

            try
            {
                if (device.LockForConfiguration(out var error))
                {
                    if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                    {
                        device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    }

                    device.UnlockForConfiguration();
                }

                // Configure Input
                var input = AVCaptureDeviceInput.FromDevice(device, out var error2);
                if (input != null && _captureSession.CanAddInput(input))
                {
                    _captureSession.AddInput(input);
                }

                // Configure Output
                var settings = new AVVideoSettingsUncompressed()
                {
                    PixelFormatType = CoreVideo.CVPixelFormatType.CV32BGRA
                };

                var videoOutput = new AVCaptureVideoDataOutput
                {
                    WeakVideoSettings             = settings.Dictionary,
                    AlwaysDiscardsLateVideoFrames = true
                };

                var videoCaptureQueue = new DispatchQueue("Video Queue");
                videoOutput.SetSampleBufferDelegateQueue(new OutputRecorder(View, _shapeLayer), videoCaptureQueue);

                if (_captureSession.CanAddOutput(videoOutput))
                {
                    _captureSession.AddOutput(videoOutput);
                }

                // Start session
                _captureSession.StartRunning();
            }
            catch (Exception e)
            {
                Console.Write(e);
            }
        }
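ConfigureDeviceAndStart leaves the session running with its inputs and outputs attached. A hedged teardown counterpart for when the view goes away; the method name is an assumption, the field matches the snippet:

        // Sketch: stop the session and release everything that was added to it.
        public void StopAndTearDown()
        {
            if (_captureSession == null)
            {
                return;
            }

            if (_captureSession.Running)
            {
                _captureSession.StopRunning();
            }

            foreach (var input in _captureSession.Inputs)
            {
                _captureSession.RemoveInput(input);
            }

            foreach (var output in _captureSession.Outputs)
            {
                _captureSession.RemoveOutput(output);
            }

            _captureSession.Dispose();
        }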
Example #9
        public void MetadataObjectTypesTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Ignore("Test only works correctly in iOS 8+");
            }

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            var auth = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            switch (auth)
            {
            case AVAuthorizationStatus.Restricted:
            case AVAuthorizationStatus.Denied:
                Assert.Fail("This test requires access to the camera, but the app has been denied access.");
                break;
            }

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #10
        public SessionSetupResult ConfigureSession(AVCaptureSession session,
                                                   SessionPresetConfiguration presetConfiguration)
        {
            _presetConfiguration  = presetConfiguration;
            session.SessionPreset = AVCaptureSession.PresetPhoto;

            var inputDeviceConfigureResult = _videoDeviceInputManager.ConfigureVideoDeviceInput(session);

            if (inputDeviceConfigureResult != SessionSetupResult.Success)
            {
                return(inputDeviceConfigureResult);
            }

            if (!session.CanAddOutput(_photoOutput))
            {
                Console.WriteLine("capture session: could not add photo output to the session");
                return(SessionSetupResult.ConfigurationFailed);
            }

            session.AddOutput(_photoOutput);
            _photoOutput.IsHighResolutionCaptureEnabled = true;

            ConfigureLivePhoto(session);

            _videoDataOutput = new AVCaptureVideoDataOutput();
            if (session.CanAddOutput(_videoDataOutput))
            {
                _videoDataOutput.AlwaysDiscardsLateVideoFrames = true;

                session.AddOutput(_videoDataOutput);
            }
            else
            {
                Console.WriteLine("capture session: warning - could not add video data output to the session");
            }

            return(SessionSetupResult.Success);
        }
Example #11
        private void SetupLiveCameraStream()
        {
            _captureSession = new AVCaptureSession();

            AVCaptureDevice captureDevice;

            captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

            if (captureDevice == null)
            {
                captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            }

            ConfigureCameraForDevice(captureDevice);
            _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            if (!_captureSession.CanAddInput(_captureDeviceInput))
            {
                return;
            }

            _capturePhotoOutput = new AVCapturePhotoOutput();
            _capturePhotoOutput.IsHighResolutionCaptureEnabled = true;
            _capturePhotoOutput.IsLivePhotoCaptureEnabled      = false;


            if (!_captureSession.CanAddOutput(_capturePhotoOutput))
            {
                return;
            }

            _captureSession.BeginConfiguration();

            _captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
            _captureSession.AddInput(_captureDeviceInput);
            _captureSession.AddOutput(_capturePhotoOutput);

            _captureSession.CommitConfiguration();

            _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
            {
                Frame        = liveCameraStream.Frame,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };

            liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

            _captureSession.StartRunning();
        }
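SetupLiveCameraStream wires up _capturePhotoOutput but never captures from it. Taking a picture would look roughly like this; the TakePhoto name and the _photoCaptureDelegate field are assumptions layered on top of the snippet:

        // Sketch: trigger a capture once the session above is running.
        void TakePhoto()
        {
            var settings = AVCapturePhotoSettings.Create();

            // _photoCaptureDelegate would be a hypothetical AVCapturePhotoCaptureDelegate
            // subclass whose overrides receive the finished photo.
            _capturePhotoOutput.CapturePhoto(settings, _photoCaptureDelegate);
        }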
Example #12
        void PrepareSession()
        {
            session = new AVCaptureSession();
            var captureDevice = frontCamera;

            if (session == null || captureDevice == null)
            {
                return;
            }

            try {
                var deviceInput = new AVCaptureDeviceInput(captureDevice, out var deviceInputError);
                if (deviceInputError != null)
                {
                    throw new NSErrorException(deviceInputError);
                }

                session.BeginConfiguration();

                if (session.CanAddInput(deviceInput))
                {
                    session.AddInput(deviceInput);
                }

                var output = new AVCaptureVideoDataOutput {
                    UncompressedVideoSetting = new AVVideoSettingsUncompressed {
                        PixelFormatType = CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange
                    },
                    AlwaysDiscardsLateVideoFrames = true
                };

                if (session.CanAddOutput(output))
                {
                    session.AddOutput(output);
                }

                session.CommitConfiguration();

                var queue = new DispatchQueue("output.queue");
                output.SetSampleBufferDelegateQueue(this, queue);

                Console.WriteLine($"PrepareSession: Done setting up delegate");
            } catch (Exception ex) {
                Console.WriteLine($"PrepareSession Error: {ex.Message}");
            }
        }
Example #13
        public void MetadataObjectTypesTest()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(8, 0))
            {
                Assert.Ignore("Test only works correctly in iOS 8+");
            }

            if (Runtime.Arch != Arch.DEVICE)
            {
                Assert.Ignore("This test only runs on device (requires camera access)");
            }

            TestRuntime.RequestCameraPermission(AVMediaType.Video, true);

            using (var captureSession = new AVCaptureSession()) {
                using (var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video)) {
                    NSError error;
                    using (var videoInput = new AVCaptureDeviceInput(videoDevice, out error)) {
                        if (captureSession.CanAddInput(videoInput))
                        {
                            captureSession.AddInput(videoInput);
                        }

                        using (var metadataOutput = new AVCaptureMetadataOutput()) {
                            if (captureSession.CanAddOutput(metadataOutput))
                            {
                                captureSession.AddOutput(metadataOutput);
                            }

                            AVMetadataObjectType all = AVMetadataObjectType.None;
                            foreach (AVMetadataObjectType val in Enum.GetValues(typeof(AVMetadataObjectType)))
                            {
                                metadataOutput.MetadataObjectTypes = val;
                                all |= val;
                                Assert.AreEqual(val, metadataOutput.MetadataObjectTypes, val.ToString());
                            }
                            metadataOutput.MetadataObjectTypes = all;
                            Assert.AreEqual(all, metadataOutput.MetadataObjectTypes, all.ToString());
                        }
                    }
                }
            }
        }
Example #14
        void SettupCaptureSession()
        {
            _captureSession = new AVCaptureSession();
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var captureDevice  = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, cameraPosition);

            if (captureDevice != null)
            {
                var input = new AVCaptureDeviceInput(captureDevice, out var error);
                if (error == null)
                {
                    if (_captureSession.CanAddInput(input))
                    {
                        _captureSession.AddInput(input);
                    }
                }
                if (_captureSession.CanAddOutput(_photoOutput))
                {
                    _captureSession.AddOutput(_photoOutput);
                }
                _cameraLayer              = new AVCaptureVideoPreviewLayer(_captureSession);
                _cameraLayer.Frame        = this.Bounds;
                _cameraLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                this.Layer.AddSublayer(_cameraLayer);

                //Turn on flash
                if (captureDevice.HasTorch)
                {
                    captureDevice.LockForConfiguration(out var err);
                    if (err == null)
                    {
                        if (captureDevice.TorchMode == AVCaptureTorchMode.Off)
                        {
                            captureDevice.TorchMode = AVCaptureTorchMode.On;
                            captureDevice.FlashMode = AVCaptureFlashMode.On;
                        }
                        captureDevice.SetTorchModeLevel(1.0f, out var _);
                        captureDevice.UnlockForConfiguration();
                    }
                }
                _captureSession.StartRunning();
            }
        }
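SettupCaptureSession sizes _cameraLayer from Bounds only once, so a later layout pass would leave the preview misplaced. A small sketch of keeping it in sync, assuming the snippet lives in a UIView subclass:

        // Sketch: keep the preview layer matched to the view after layout changes.
        public override void LayoutSubviews()
        {
            base.LayoutSubviews();

            if (_cameraLayer != null)
            {
                _cameraLayer.Frame = Bounds;
            }
        }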
Example #15
        void setupCaptureSession()
        {
            if (CaptureSession != null)
            {
                return;
            }

            CaptureSession = new AVCaptureSession();

            NSNotificationCenter.DefaultCenter.AddObserver(null, captureSessionNotification, CaptureSession);

            applicationWillEnterForegroundNotificationObserver =
                NSNotificationCenter.DefaultCenter.AddObserver(UIApplication.WillEnterForegroundNotification.ToString(),
                                                               UIApplication.SharedApplication,
                                                               NSOperationQueue.CurrentQueue, delegate(NSNotification notification) {
                applicationWillEnterForeground();
            });

            videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            NSError error;

            videoInput = new AVCaptureDeviceInput(videoDevice, out error);
            if (CaptureSession.CanAddInput(videoInput))
            {
                CaptureSession.AddInput(videoInput);
            }

            metadataOutput = new AVCaptureMetadataOutput();

            var metadataQueue = new DispatchQueue("com.AVCam.metadata");

            metadataObjectsDelegate = new MetadataObjectsDelegate {
                DidOutputMetadataObjectsAction = DidOutputMetadataObjects
            };
            metadataOutput.SetDelegate(metadataObjectsDelegate, metadataQueue);

            if (CaptureSession.CanAddOutput(metadataOutput))
            {
                CaptureSession.AddOutput(metadataOutput);
            }
        }
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice != null)
            {
                ConfigureCameraForDevice(captureDevice);
                captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

                var dictionary = new NSMutableDictionary();
                dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
                stillImageOutput             = new AVCaptureStillImageOutput()
                {
                    OutputSettings = dictionary                 // use the JPEG codec settings built above
                };

                captureSession.AddOutput(stillImageOutput);
                captureSession.AddInput(captureDeviceInput);
                var captureAudioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                if (captureAudioDevice != null)
                {
                    audioInput = AVCaptureDeviceInput.FromDevice(captureAudioDevice);
                    captureSession.AddInput(audioInput);
                }

                if (captureSession.CanAddOutput(output))
                {
                    captureSession.AddOutput(output);
                }

                captureSession.StartRunning();
            }
        }
        void setupAVCapture()
        {
            NSError error = null;

            AVCaptureSession session = new AVCaptureSession();

            if (UIDevice.CurrentDevice.UserInterfaceIdiom == UIUserInterfaceIdiom.Phone)
            {
                session.SessionPreset = AVCaptureSession.Preset640x480;
            }
            else
            {
                session.SessionPreset = AVCaptureSession.PresetPhoto;
            }

            // Select a video device, make an input
            AVCaptureDevice device = null;

            AVCaptureDevicePosition desiredPosition = AVCaptureDevicePosition.Front;

            // find the front facing camera
            foreach (AVCaptureDevice d in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
            {
                if (d.Position == desiredPosition)
                {
                    device = d;
                    this.isUsingFrontFacingCamera = true;
                    break;
                }
            }
            // fall back to the default camera.
            if (device == null)
            {
                this.isUsingFrontFacingCamera = false;
                device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            }

            // get the input device
            AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice(device, out error);

            if (error == null)
            {
                // add the input to the session
                if (session.CanAddInput(deviceInput))
                {
                    session.AddInput(deviceInput);
                }


                // Make a video data output
                this.videoDataOutput = new AVCaptureVideoDataOutput();

                // we want BGRA, both CoreGraphics and OpenGL work well with 'BGRA'
                NSDictionary rgbOutputSettings = new NSDictionary(
                    CVPixelBuffer.PixelFormatTypeKey,
                    CVPixelFormatType.CV32BGRA
                    );

                this.videoDataOutput.WeakVideoSettings             = rgbOutputSettings;
                this.videoDataOutput.AlwaysDiscardsLateVideoFrames = true;                 // discard if the data output queue is blocked

                // create a serial dispatch queue used for the sample buffer delegate
                // a serial dispatch queue must be used to guarantee that video frames will be delivered in order
                // see the header doc for setSampleBufferDelegate:queue: for more information
                this.videoDataOutputQueue = new DispatchQueue("VideoDataOutputQueue");
                this.videoDataOutput.SetSampleBufferDelegate(new CustomAVCaptureVideoDataOutputSampleBufferDelegate(this), this.videoDataOutputQueue);

                if (session.CanAddOutput(this.videoDataOutput))
                {
                    session.AddOutput(this.videoDataOutput);
                }

                // get the output for doing face detection.
                this.videoDataOutput.ConnectionFromMediaType(AVMediaType.Video).Enabled = true;

                this.previewLayer = new AVCaptureVideoPreviewLayer(session);
                this.previewLayer.BackgroundColor = UIColor.Black.CGColor;
                this.previewLayer.VideoGravity    = AVLayerVideoGravity.ResizeAspect;

                CALayer rootLayer = this.previewView.Layer;
                rootLayer.MasksToBounds = true;
                this.previewLayer.Frame = rootLayer.Bounds;
                rootLayer.AddSublayer(this.previewLayer);
                session.StartRunning();
            }
            session = null;
            if (error != null)
            {
                UIAlertView alertView = new UIAlertView(
                    "Failed with error " + (int)error.Code,
                    error.LocalizedDescription,
                    null,
                    "Dismiss",
                    null);
                alertView.Show();
                this.teardownAVCapture();
            }
        }
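The SetupLiveCameraStream listing above configures an AVCaptureStillImageOutput but never captures from it. A hedged sketch of the capture call; the method name and the JPEG handling are assumptions:

        // Sketch: grab a single JPEG frame from the still image output configured above.
        async Task<NSData> CaptureJpegAsync()
        {
            var connection   = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync(connection);

            return AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
        }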
Example #18
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            weAreRecording  = false;
            lblError.Hidden = true;

            btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

            //Set up session
            session = new AVCaptureSession();


            //Set up inputs and add them to the session
            //this will only work if using a physical device!

            Console.WriteLine("getting device inputs");
            try{
                //add video capture device
                device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                input  = AVCaptureDeviceInput.FromDevice(device);
                session.AddInput(input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput  = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);
            }
            catch (Exception ex) {
                //show the label error.  This will always show when running in simulator instead of physical device.
                lblError.Hidden = false;
                return;
            }



            //Set up preview layer (shows what the input device sees)
            Console.WriteLine("setting up preview layer");
            previewlayer       = new AVCaptureVideoPreviewLayer(session);
            previewlayer.Frame = this.View.Bounds;

            //this code makes UI controls sit on top of the preview layer!  Allows you to just place the controls in interface builder
            UIView cameraView = new UIView();
            cameraView.Layer.AddSublayer(previewlayer);
            this.View.AddSubview(cameraView);
            this.View.SendSubviewToBack(cameraView);

            Console.WriteLine("Configuring output");
            output = new AVCaptureMovieFileOutput();

            long   totalSeconds       = 10000;
            Int32  preferredTimeScale = 30;
            CMTime maxDuration        = new CMTime(totalSeconds, preferredTimeScale);

            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration   = maxDuration;

            if (session.CanAddOutput(output))
            {
                session.AddOutput(output);
            }

            session.SessionPreset = AVCaptureSession.PresetMedium;

            Console.WriteLine("About to start running session");

            session.StartRunning();

            //toggle recording button was pushed.
            btnStartRecording.TouchUpInside += startStopPushed;


            //Console.ReadLine ();
        }
	    bool SetupCaptureSession ()
		{
			//Console.WriteLine ("SetupCaptureSession");
			// Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
			// is used to deliver both audio and video buffers, and our video processing consistently takes
			// too long, the delivery queue can back up, resulting in audio being dropped.
			// 
			// When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
			// that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
			//				
			// RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.
			
			// Create Capture session
			captureSession = new AVCaptureSession ();
			captureSession.BeginConfiguration ();
			
			// Create audio connection
			NSError error;
			var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
			if (audioDevice == null)
				return false; // e.g. simulator

			var audioIn = new AVCaptureDeviceInput (audioDevice, out error);
			if (captureSession.CanAddInput (audioIn))
				captureSession.AddInput (audioIn);
			
			var audioOut = new AVCaptureAudioDataOutput ();
			var audioCaptureQueue = new DispatchQueue ("Audio Capture Queue");

			// Add the Delegate to capture each sample that comes through
			audioOut.SetSampleBufferDelegateQueue (this, audioCaptureQueue);
			
			if (captureSession.CanAddOutput (audioOut))
				captureSession.AddOutput (audioOut);
			
			audioConnection = audioOut.ConnectionFromMediaType (AVMediaType.Audio);
					
			// Create Video Session
			var videoDevice = VideoDeviceWithPosition (AVCaptureDevicePosition.Back);
			var videoIn = new AVCaptureDeviceInput (videoDevice, out error);
			
			if (captureSession.CanAddInput (videoIn))
				captureSession.AddInput (videoIn);
			
			// RosyWriter prefers to discard late video frames early in the capture pipeline, since its
			// processing can take longer than real-time on some platforms (such as iPhone 3GS).
			// Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
			// alwaysDiscardsLateVideoFrames property to NO.
			var videoOut = new AVCaptureVideoDataOutput {
				AlwaysDiscardsLateVideoFrames = true,
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};
			
			// Create a DispatchQueue for the Video Processing
			var videoCaptureQueue = new DispatchQueue ("Video Capture Queue");
			videoOut.SetSampleBufferDelegateQueue (this, videoCaptureQueue);
			
			if (captureSession.CanAddOutput (videoOut))
				captureSession.AddOutput (videoOut);
			
			// Set the Video connection from the Video Output object
			videoConnection = videoOut.ConnectionFromMediaType (AVMediaType.Video);
			videoOrientation = videoConnection.VideoOrientation;
			
			captureSession.CommitConfiguration ();
			
			return true;
		}
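The ViewDidLoad listing above wires btnStartRecording.TouchUpInside to startStopPushed, which is not shown. A rough sketch of how that handler could toggle the AVCaptureMovieFileOutput; the file path and the recording delegate are assumptions:

        // Sketch: start or stop recording on the movie file output configured in ViewDidLoad.
        void startStopPushed(object sender, EventArgs e)
        {
            if (!weAreRecording)
            {
                var path = Path.Combine(Path.GetTempPath(), "capture.mov");

                // MyRecordingDelegate is a hypothetical AVCaptureFileOutputRecordingDelegate
                // subclass that overrides FinishedRecording to handle the saved file.
                output.StartRecordingToOutputFile(NSUrl.FromFilename(path), new MyRecordingDelegate());
                weAreRecording = true;
            }
            else
            {
                output.StopRecording();
                weAreRecording = false;
            }
        }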
Example #20
        // Call this on the session queue.
        void ConfigureSession()
        {
            if (setupResult != AVCamSetupResult.Success)
            {
                return;
            }

            NSError error = null;

            session.BeginConfiguration();

            /*
             *      We do not create an AVCaptureMovieFileOutput when setting up the session because the
             *      AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
             */
            session.SessionPreset = AVCaptureSession.PresetPhoto;

            // Add video input.

            // Choose the back dual camera if available, otherwise default to a wide angle camera.
            var videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

            if (videoDevice == null)
            {
                // If the back dual camera is not available, default to the back wide angle camera.
                videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

                // In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera.
                if (videoDevice == null)
                {
                    videoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);
                }
            }
            var lVideoDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out error);

            if (lVideoDeviceInput == null)
            {
                Console.WriteLine($"Could not create video device input: {error}");
                setupResult = AVCamSetupResult.SessionConfigurationFailed;
                session.CommitConfiguration();
                return;
            }
            if (session.CanAddInput(lVideoDeviceInput))
            {
                session.AddInput(lVideoDeviceInput);
                videoDeviceInput = lVideoDeviceInput;

                DispatchQueue.MainQueue.DispatchAsync(() =>
                {
                    /*
                     *      Why are we dispatching this to the main queue?
                     *      Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView
                     *      can only be manipulated on the main thread.
                     *      Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                     *      on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
                     *
                     *      Use the status bar orientation as the initial video orientation. Subsequent orientation changes are
                     *      handled by -[AVCamCameraViewController viewWillTransitionToSize:withTransitionCoordinator:].
                     */
                    var statusBarOrientation    = UIApplication.SharedApplication.StatusBarOrientation;
                    var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                    if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                    {
                        initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
                    }

                    VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
                });
            }
            else
            {
                Console.WriteLine(@"Could not add video device input to the session");
                setupResult = AVCamSetupResult.SessionConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            // Add audio input.
            var audioDevice      = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
            var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);

            if (audioDeviceInput == null)
            {
                Console.WriteLine($"Could not create audio device input: {error}");
            }
            if (session.CanAddInput(audioDeviceInput))
            {
                session.AddInput(audioDeviceInput);
            }
            else
            {
                Console.WriteLine(@"Could not add audio device input to the session");
            }

            // Add photo output.
            var lPhotoOutput = new AVCapturePhotoOutput();

            if (session.CanAddOutput(lPhotoOutput))
            {
                session.AddOutput(lPhotoOutput);
                photoOutput = lPhotoOutput;

                photoOutput.IsHighResolutionCaptureEnabled = true;
                photoOutput.IsLivePhotoCaptureEnabled      = photoOutput.IsLivePhotoCaptureSupported;
                //photoOutput.IsDepthDataDeliveryEnabled(photoOutput.IsDepthDataDeliverySupported());

                livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? AVCamLivePhotoMode.On : AVCamLivePhotoMode.Off;
                //depthDataDeliveryMode = photoOutput.IsDepthDataDeliverySupported() ? AVCamDepthDataDeliveryMode.On : AVCamDepthDataDeliveryMode.Off;

                inProgressPhotoCaptureDelegates  = new Dictionary <long, AVCamPhotoCaptureDelegate>();
                inProgressLivePhotoCapturesCount = 0;
            }
            else
            {
                Console.WriteLine(@"Could not add photo output to the session");
                setupResult = AVCamSetupResult.SessionConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            backgroundRecordingId = UIApplication.BackgroundTaskInvalid;

            session.CommitConfiguration();
        }
Example #21
        public void SetupSession()
        {
            videoPreviewLayer.Session = captureSession;
            videoPreviewLayer.Frame   = liveCameraStream.Bounds;
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = GetBackCamera();

            ConfigureCameraForDevice(captureDevice);
            NSError err;

            videoDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice, out err);
            videoDataOutput  = new AVCaptureVideoDataOutput
            {
                AlwaysDiscardsLateVideoFrames = true
            };
            DispatchQueue queue = new DispatchQueue("dbrcameraQueue");

            if (captureSession.CanAddInput(videoDeviceInput))
            {
                captureSession.AddInput(videoDeviceInput);
                DispatchQueue.MainQueue.DispatchAsync(() =>
                {
                    var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                    var statusBarOrientation    = UIApplication.SharedApplication.StatusBarOrientation;
                    if (statusBarOrientation != UIInterfaceOrientation.Unknown)
                    {
                        AVCaptureVideoOrientation videoOrientation;
                        if (Enum.TryParse(statusBarOrientation.ToString(), out videoOrientation))
                        {
                            initialVideoOrientation = videoOrientation;
                        }
                    }
                    videoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
                });
            }
            else if (err != null)
            {
                Console.WriteLine($"Could not create video device input: {err}");
                //this.setupResult = SessionSetupResult.ConfigurationFailed;
                this.captureSession.CommitConfiguration();
                return;
            }
            else
            {
                Console.WriteLine("Could not add video device input to the session");
                //this.setupResult = SessionSetupResult.ConfigurationFailed;
                this.captureSession.CommitConfiguration();
                return;
            }

            if (captureSession.CanAddOutput(videoDataOutput))
            {
                captureSession.AddOutput(videoDataOutput);
                captureOutput.reader = reader;
                captureOutput.update = ResetResults;

                videoDataOutput.SetSampleBufferDelegateQueue(captureOutput, queue);
                videoDataOutput.WeakVideoSettings = new NSDictionary <NSString, NSObject>(CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromInt32((int)CVPixelFormatType.CV32BGRA));
            }
            else
            {
                Console.WriteLine("Could not add metadata output to the session");
                //this.setupResult = SessionSetupResult.ConfigurationFailed;
                captureSession.CommitConfiguration();

                return;
            }
            captureSession.CommitConfiguration();
        }
        public override void ViewDidAppear(bool animated)
        {
            base.ViewDidAppear(animated);
            weAreRecording = false;


            btnStartRecording       = UIButton.FromType(UIButtonType.Custom);
            btnStartRecording.Frame = new RectangleF(100, 100, 60, 50);
            btnStartRecording.SetImage(UIImage.FromFile("captureButton.png"), UIControlState.Normal);

            btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

            var screenSize   = UIScreen.MainScreen.Bounds;
            var screenWidth  = screenSize.Width;
            var screenHeight = screenSize.Height;

            activityIndicator        = new UIActivityIndicatorView();
            activityIndicator.Frame  = new RectangleF(100, 100, 60, 50);
            activityIndicator.Center = new CGPoint(screenWidth / 2, screenHeight / 2);

            btnStartRecording.Center = new CGPoint(screenWidth / 2, screenHeight - 40);

            //Set up session
            session = new AVCaptureSession();

            btnCancelPage       = UIButton.FromType(UIButtonType.InfoLight);
            btnCancelPage.Frame = new RectangleF(200, 200, 160, 150);
            btnCancelPage.SetImage(UIImage.FromFile("icon_closemap.png"), UIControlState.Normal);

            btnCancelPage.Center = new CGPoint(15, 30);

            //Set up inputs and add them to the session
            //this will only work if using a physical device!

            Console.WriteLine("getting device inputs");
            try
            {
                //add video capture device
                device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                input  = AVCaptureDeviceInput.FromDevice(device);
                session.AddInput(input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput  = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);
            }
            catch (Exception ex)
            {
                //show the label error.  This will always show when running in simulator instead of physical device.
                //lblError.Hidden = false;
                return;
            }

            //Set up preview layer (shows what the input device sees)
            Console.WriteLine("setting up preview layer");
            previewlayer       = new AVCaptureVideoPreviewLayer(session);
            previewlayer.Frame = this.View.Bounds;

            //this code makes UI controls sit on top of the preview layer!  Allows you to just place the controls in interface builder
            cameraView = new UIView();
            cameraView.Layer.AddSublayer(previewlayer);
            this.View.AddSubview(cameraView);
            this.View.SendSubviewToBack(cameraView);

            Console.WriteLine("Configuring output");
            output = new AVCaptureMovieFileOutput();

            long   totalSeconds       = 10000;
            Int32  preferredTimeScale = 30;
            CMTime maxDuration        = new CMTime(totalSeconds, preferredTimeScale);

            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration   = maxDuration;

            if (session.CanAddOutput(output))
            {
                session.AddOutput(output);
            }

            session.SessionPreset = AVCaptureSession.Preset640x480;

            Console.WriteLine("About to start running session");

            session.StartRunning();

            //toggle recording button was pushed.
            btnStartRecording.TouchUpInside += startStopPushed;

            btnCancelPage.TouchUpInside += (s, e) =>
            {
                (Element as CameraPage).Navigation.PopAsync();
                if (session.Running == true)
                {
                    session.StopRunning();
                }

                //session = null;
                session.RemoveInput(input);
                session.RemoveInput(audioInput);
                session.Dispose();
                DismissViewController(true, null);
            };
            View.AddSubview(btnCancelPage);

            View.AddSubview(btnStartRecording);

            timerLabel = new UILabel(new RectangleF(50, 50, 50, 50))
            {
                TextColor = UIColor.White
            };
            timerLabel.Text   = "00:" + videoLength;
            timerLabel.Center = new CGPoint(screenWidth / 2, 30);

            timerLabel.TextColor = UIColor.White;
            View.AddSubview(timerLabel);
        }
Example #23
        public override void ViewDidLoad()
        {
            base.ViewDidLoad ();
            weAreRecording = false;
            lblError.Hidden = true;

            btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

            //Set up session
            session = new AVCaptureSession ();

            //Set up inputs and add them to the session
            //this will only work if using a physical device!

            Console.WriteLine ("getting device inputs");
            try{
                //add video capture device
                device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
                input = AVCaptureDeviceInput.FromDevice (device);
                session.AddInput (input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);

            }
            catch(Exception ex){
                //show the label error.  This will always show when running in simulator instead of physical device.
                lblError.Hidden = false;
                return;
            }

            //Set up preview layer (shows what the input device sees)
            Console.WriteLine ("setting up preview layer");
            previewlayer = new AVCaptureVideoPreviewLayer (session);
            previewlayer.Frame = this.View.Bounds;

            //this code makes UI controls sit on top of the preview layer!  Allows you to just place the controls in interface builder
            UIView cameraView = new UIView ();
            cameraView.Layer.AddSublayer (previewlayer);
            this.View.AddSubview (cameraView);
            this.View.SendSubviewToBack (cameraView);

            Console.WriteLine ("Configuring output");
            output = new AVCaptureMovieFileOutput ();

            long totalSeconds = 10000;
            Int32 preferredTimeScale = 30;
            CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale);
            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration = maxDuration;

            if (session.CanAddOutput (output)) {
                session.AddOutput (output);
            }

            session.SessionPreset = AVCaptureSession.PresetMedium;

            Console.WriteLine ("About to start running session");

            session.StartRunning ();

            //toggle recording button was pushed.
            btnStartRecording.TouchUpInside += startStopPushed;

            //Console.ReadLine ();
        }
        public void SetupCamera()
        {
            AVCaptureDevice captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaTypes.Video, AVCaptureDevicePosition.Back);

            if (captureDevice == null)
            {
                Console.WriteLine("Could not create capture device");
                return;
            }
            CaptureDevice = captureDevice;
            if (captureDevice.SupportsAVCaptureSessionPreset(AVCaptureSession.Preset3840x2160))
            {
                captureSession.SessionPreset = AVCaptureSession.Preset3840x2160;
                BufferAspectRatio            = 3840.0 / 2160.0;
            }
            else
            {
                captureSession.SessionPreset = AVCaptureSession.Preset1920x1080;
                BufferAspectRatio            = 1920.0 / 1080.0;
            }

            AVCaptureDeviceInput deviceInput = new AVCaptureDeviceInput(captureDevice, out NSError deviceInputErr);

            if (deviceInputErr != null)
            {
                Console.WriteLine("Could not create device input");
                deviceInputErr.Dispose();
                return;
            }
            if (captureSession.CanAddInput(deviceInput))
            {
                captureSession.AddInput(deviceInput);
            }

            VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            VideoDataOutput.SetSampleBufferDelegateQueue(this, VideoDataOutputQueue);
            //VideoDataOutput.WeakVideoSettings = new NSDictionary<NSString, NSString> ();
            //VideoDataOutput.WeakVideoSettings.TryAdd<NSString, NSString> (CVPixelBuffer.PixelFormatTypeKey, OSType);

            if (captureSession.CanAddOutput(VideoDataOutput))
            {
                captureSession.AddOutput(VideoDataOutput);
                VideoDataOutput.ConnectionFromMediaType(AVMediaType.Video).PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Off;
            }
            else
            {
                Console.WriteLine("Could not add VDO output");
            }

            _ = captureDevice.LockForConfiguration(out NSError lockConf);
            if (lockConf != null)
            {
                Console.WriteLine("Could not set zoom level due to error: " + lockConf);
                lockConf.Dispose();
                return;
            }
            captureDevice.VideoZoomFactor           = 2;
            captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
            captureDevice.UnlockForConfiguration();

            captureSession.StartRunning();
        }
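SetupCamera registers this object as the sample buffer delegate, so the containing class is expected to expose the optional didOutputSampleBuffer callback. A hedged sketch of that callback (frame processing omitted):

        [Export("captureOutput:didOutputSampleBuffer:fromConnection:")]
        public void DidOutputSampleBuffer(AVCaptureOutput captureOutput, CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
        {
            // Grab the pixel buffer for this frame (e.g. to hand it to Vision or a renderer).
            using (var pixelBuffer = sampleBuffer.GetImageBuffer() as CVPixelBuffer)
            {
                if (pixelBuffer != null)
                {
                    // Frame processing would go here.
                }
            }

            // Dispose promptly; holding on to sample buffers stalls the capture pipeline.
            sampleBuffer.Dispose();
        }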
Example #25
0
		void setupCaptureSession ()
		{
			if (CaptureSession != null)
				return;

			CaptureSession = new AVCaptureSession ();

			NSNotificationCenter.DefaultCenter.AddObserver (null, captureSessionNotification, CaptureSession);

			applicationWillEnterForegroundNotificationObserver = 
				NSNotificationCenter.DefaultCenter.AddObserver (UIApplication.WillEnterForegroundNotification.ToString (),
			                                                    UIApplication.SharedApplication,
					NSOperationQueue.CurrentQueue, delegate(NSNotification notification) {
				applicationWillEnterForeground ();                                          	
			});

			videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);

			NSError error;
			videoInput = new AVCaptureDeviceInput (videoDevice, out error);
			if (CaptureSession.CanAddInput (videoInput))
				CaptureSession.AddInput (videoInput);

			metadataOutput = new AVCaptureMetadataOutput ();

			var metadataQueue = new DispatchQueue ("com.AVCam.metadata");
			metadataObjectsDelegate = new MetadataObjectsDelegate {
				DidOutputMetadataObjectsAction = DidOutputMetadataObjects
			};
			metadataOutput.SetDelegate (metadataObjectsDelegate, metadataQueue);

			if (CaptureSession.CanAddOutput (metadataOutput))
				CaptureSession.AddOutput (metadataOutput);
		}
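The MetadataObjectsDelegate helper used above is not included in the snippet. A hedged reconstruction (the Action signature is assumed), plus a consumer that reads scanned barcode payloads:

		class MetadataObjectsDelegate : AVCaptureMetadataOutputObjectsDelegate
		{
			// Forwarded to the action assigned in setupCaptureSession.
			public Action<AVMetadataObject []> DidOutputMetadataObjectsAction { get; set; }

			public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput, AVMetadataObject [] metadataObjects, AVCaptureConnection connection)
			{
				DidOutputMetadataObjectsAction?.Invoke (metadataObjects);
			}
		}

		// Example consumer: log the string payload of any machine-readable code (e.g. a QR code).
		void DidOutputMetadataObjects (AVMetadataObject [] metadataObjects)
		{
			foreach (var code in metadataObjects.OfType<AVMetadataMachineReadableCodeObject> ())
				Console.WriteLine ("Scanned value: {0}", code.StringValue);
		}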
Example #26
0
        protected virtual void SetupAVCapture()
        {
            AVCaptureDeviceInput deviceInput;

            // Select a video device, make an input
            var videoDevice = AVCaptureDeviceDiscoverySession.Create(
                new AVCaptureDeviceType[] { AVCaptureDeviceType.BuiltInWideAngleCamera },
                AVMediaType.Video,
                AVCaptureDevicePosition.Back
                ).Devices.FirstOrDefault();

            deviceInput = new AVCaptureDeviceInput(videoDevice, out NSError error);
            if (error != null)
            {
                Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
                return;
            }

            session.BeginConfiguration();
            session.SessionPreset = AVCaptureSession.Preset640x480; // Model image size is smaller

            // Add a video input
            if (!session.CanAddInput(deviceInput))
            {
                Console.WriteLine("Could not add video device input to the session");
                session.CommitConfiguration();
                return;
            }
            session.AddInput(deviceInput);

            if (session.CanAddOutput(videoDataOutput))
            {
                session.AddOutput(videoDataOutput);
                // Add a video data output
                videoDataOutput.AlwaysDiscardsLateVideoFrames = true;
                videoDataOutput.WeakVideoSettings             = new NSDictionary(CVPixelBuffer.PixelFormatTypeKey, CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange);
                videoDataOutput.SetSampleBufferDelegateQueue(this, videoDataOutputQueue);
            }
            else
            {
                Console.WriteLine("Could not add video data output to the session");
                session.CommitConfiguration();
                return;
            }

            var captureConnection = videoDataOutput.ConnectionFromMediaType(AVMediaType.Video);

            // Always process the frames
            captureConnection.Enabled = true;
            videoDevice.LockForConfiguration(out NSError error2);
            if (error2 == null)
            {
                var formatDescription        = videoDevice.ActiveFormat.FormatDescription as CMVideoFormatDescription;
                CMVideoDimensions dimensions = formatDescription.Dimensions;
                bufferSize.Width  = dimensions.Width;
                bufferSize.Height = dimensions.Height;
                videoDevice.UnlockForConfiguration();
            }
            else
            {
                Console.WriteLine($"{error2.LocalizedDescription}");
            }
            session.CommitConfiguration();
            previewLayer = AVCaptureVideoPreviewLayer.FromSession(session);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            rootLayer          = previewView.Layer;
            previewLayer.Frame = rootLayer.Bounds;
            rootLayer.AddSublayer(previewLayer);
        }
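A matching teardown routine is not part of the sample above; a minimal sketch (the TeardownAVCapture name is assumed) that undoes the setup could be:

        protected virtual void TeardownAVCapture()
        {
            session.StopRunning();

            // Remove everything SetupAVCapture added so the method can be called again safely.
            foreach (var input in session.Inputs)
                session.RemoveInput(input);
            foreach (var output in session.Outputs)
                session.RemoveOutput(output);

            previewLayer?.RemoveFromSuperLayer();
            previewLayer = null;
        }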
		public async override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			// Disable UI. The UI is enabled if and only if the session starts running.
			CameraButton.Enabled = false;
			RecordButton.Enabled = false;
			StillButton.Enabled = false;

			// Create the AVCaptureSession.
			Session = new AVCaptureSession ();

			// Setup the preview view.
			PreviewView.Session = Session;

			// Communicate with the session and other session objects on this queue.
			SessionQueue = new DispatchQueue ("session queue");
			SetupResult = AVCamSetupResult.Success;

			// Check video authorization status. Video access is required and audio access is optional.
			// If audio access is denied, audio is not recorded during movie recording.
			switch (AVCaptureDevice.GetAuthorizationStatus (AVMediaType.Video)) {
				// The user has previously granted access to the camera.
				case AVAuthorizationStatus.Authorized:
					break;

				// The user has not yet been presented with the option to grant video access.
				// We suspend the session queue to delay session setup until the access request has completed to avoid
				// asking the user for audio access if video access is denied.
				// Note that audio access will be implicitly requested when we create an AVCaptureDeviceInput for audio during session setup.
				case AVAuthorizationStatus.NotDetermined:
					SessionQueue.Suspend ();
					var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync (AVMediaType.Video);
					if (!granted)
						SetupResult = AVCamSetupResult.CameraNotAuthorized;
					SessionQueue.Resume ();
					break;

				// The user has previously denied access.
				default:
					SetupResult = AVCamSetupResult.CameraNotAuthorized;
					break;
			}

			// Setup the capture session.
			// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
			// Why not do all of this on the main queue?
			// Because AVCaptureSession.StartRunning is a blocking call which can take a long time. We dispatch session setup to the sessionQueue
			// so that the main queue isn't blocked, which keeps the UI responsive.
			SessionQueue.DispatchAsync (() => {
				if (SetupResult != AVCamSetupResult.Success)
					return;

				backgroundRecordingID = -1;
				NSError error;
				AVCaptureDevice videoDevice = CreateDevice (AVMediaType.Video, AVCaptureDevicePosition.Back);
				AVCaptureDeviceInput videoDeviceInput = AVCaptureDeviceInput.FromDevice (videoDevice, out error);
				if (videoDeviceInput == null)
					Console.WriteLine ("Could not create video device input: {0}", error);

				Session.BeginConfiguration ();
				if (Session.CanAddInput (videoDeviceInput)) {
					Session.AddInput (VideoDeviceInput = videoDeviceInput);
					DispatchQueue.MainQueue.DispatchAsync (() => {
						// Why are we dispatching this to the main queue?
						// Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
						// can only be manipulated on the main thread.
						// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
						// on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
						// Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
						// ViewWillTransitionToSize method.
						UIInterfaceOrientation statusBarOrientation = UIApplication.SharedApplication.StatusBarOrientation;
						AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
						if (statusBarOrientation != UIInterfaceOrientation.Unknown)
							initialVideoOrientation = (AVCaptureVideoOrientation)(long)statusBarOrientation;

						var previewLayer = (AVCaptureVideoPreviewLayer)PreviewView.Layer;
						previewLayer.Connection.VideoOrientation = initialVideoOrientation;
					});
				} else {
					Console.WriteLine ("Could not add video device input to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				AVCaptureDevice audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Audio);
				AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice (audioDevice, out error);
				if (audioDeviceInput == null)
					Console.WriteLine ("Could not create audio device input: {0}", error);

				if (Session.CanAddInput (audioDeviceInput))
					Session.AddInput (audioDeviceInput);
				else
					Console.WriteLine ("Could not add audio device input to the session");

				var movieFileOutput = new AVCaptureMovieFileOutput ();
				if (Session.CanAddOutput (movieFileOutput)) {
					Session.AddOutput (MovieFileOutput = movieFileOutput);
					AVCaptureConnection connection = movieFileOutput.ConnectionFromMediaType (AVMediaType.Video);
					if (connection.SupportsVideoStabilization)
						connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
				} else {
					Console.WriteLine ("Could not add movie file output to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				var stillImageOutput = new AVCaptureStillImageOutput ();
				if (Session.CanAddOutput (stillImageOutput)) {
					stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed {
						Codec = AVVideoCodec.JPEG
					};
					Session.AddOutput (StillImageOutput = stillImageOutput);
				} else {
					Console.WriteLine ("Could not add still image output to the session");
					SetupResult = AVCamSetupResult.SessionConfigurationFailed;
				}

				Session.CommitConfiguration ();
			});
		}
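CreateDevice is called above but not defined in the snippet. A hedged reconstruction in the style of the AVCam sample, preferring the requested position and falling back to the first available device:

		static AVCaptureDevice CreateDevice (string mediaType, AVCaptureDevicePosition position)
		{
			var devices = AVCaptureDevice.DevicesWithMediaType (mediaType);

			// Prefer a device at the requested position, otherwise fall back to whatever is available.
			var device = devices.FirstOrDefault (d => d.Position == position);
			return device ?? devices.FirstOrDefault ();
		}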
Example #28
0
        public Task<OperationResult> Setup(bool enableAudioRecording, bool enableStillImageCapture = false, UIInterfaceOrientation orientation = UIInterfaceOrientation.Portrait, int numberOfCameras = 1)
        {
            TaskCompletionSource<OperationResult> tcs = new TaskCompletionSource<OperationResult>();
            var warnings = new List<string>();

            NumberOfCameras = numberOfCameras;

            _enableAudioRecording    = enableAudioRecording;
            _enableStillImageCapture = enableStillImageCapture;
            _session = new AVCaptureSession();

            _backgroundRecordingID = -1;
            NSError error;
            var     result = AVCaptureDeviceFactory.CreateDevice(AVMediaType.Video, AVCaptureDevicePosition.Back);

            if (!result.IsSuccessful)
            {
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
                tcs.SetResult(OperationResult.AsFailure("No video devices found, probably running in the simulator"));
                return(tcs.Task);
            }

            _videoDeviceInput = AVCaptureDeviceInput.FromDevice(result.Result, out error);

            if (_videoDeviceInput == null)
            {
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
                tcs.SetResult(OperationResult.AsFailure(@"Could not create video device input: {error}"));
                return(tcs.Task);
            }

            _session.BeginConfiguration();
            if (_session.CanAddInput(_videoDeviceInput))
            {
                _session.AddInput(_videoDeviceInput);

                var initialVideoOrientation = (AVCaptureVideoOrientation)(long)orientation;
                PreviewLayer.Session      = _session;
                PreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                PreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
            }
            else
            {
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
                tcs.SetResult(OperationResult.AsFailure("Could not add video device input to the session"));
                return(tcs.Task);
            }

            if (_enableAudioRecording)
            {
                AVCaptureDevice      audioDevice      = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
                AVCaptureDeviceInput audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);
                if (audioDeviceInput == null)
                {
                    warnings.Add(@"Could not create audio device input: {error}");
                }
                else
                {
                    if (_session.CanAddInput(audioDeviceInput))
                    {
                        _session.AddInput(audioDeviceInput);
                    }
                    else
                    {
                        warnings.Add("Could not add audio device input to the session");
                    }
                }
            }

            _movieFileOutput = new AVCaptureMovieFileOutput();
            if (_session.CanAddOutput(_movieFileOutput))
            {
                _session.AddOutput(_movieFileOutput);
                AVCaptureConnection connection = _movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
                if (connection.SupportsVideoStabilization)
                {
                    connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
                }
            }
            else
            {
                warnings.Add("Could not add movie file output to the session");
                _setupResult = CameraSetupResult.SessionConfigurationFailed;
            }

            if (_enableStillImageCapture)
            {
                _stillImageOutput = new AVCaptureStillImageOutput();
                if (_session.CanAddOutput(_stillImageOutput))
                {
                    _stillImageOutput.CompressedVideoSetting = new AVVideoSettingsCompressed
                    {
                        Codec = AVVideoCodec.JPEG
                    };
                    _session.AddOutput(_stillImageOutput);
                }
                else
                {
                    warnings.Add("Could not add still image output to the session");
                    _setupResult = CameraSetupResult.SessionConfigurationFailed;
                }
            }

            _session.CommitConfiguration();

            _setupResult = CameraSetupResult.Success;
            tcs.SetResult(OperationResult.AsSuccess(string.Empty, warnings));

            AddObservers();

            return(tcs.Task);
        }
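A caller-side sketch of how Setup might be consumed (the _cameraService field and the IsSuccessful member are assumptions inferred from the signatures above, not taken from the original source):

        public async Task StartCameraAsync()
        {
            var setupResult = await _cameraService.Setup(
                enableAudioRecording: true,
                enableStillImageCapture: true,
                orientation: UIInterfaceOrientation.Portrait);

            if (!setupResult.IsSuccessful)
            {
                Console.WriteLine("Camera setup failed");
                return;
            }

            // Safe to show the preview layer and enable the record button here.
        }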
Example #29
0
        protected void Initialize()
        {
            // configure the capture session for medium resolution, change this if your code
            // can cope with more data or volume
            CaptureSession = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };

            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device         = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            // Slow-motion frame rates are configured after the session starts running
            // (see the speedOptions check below).

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            movieFileOutput = new AVCaptureMovieFileOutput
            {
                //set max record time to 10 minutes
                MaxRecordedDuration = CMTime.FromSeconds(600, 1)
            };


            photoFileOutput = new AVCapturePhotoOutput();

            photoFileOutput.IsHighResolutionCaptureEnabled = true;

            if (CaptureSession.CanAddOutput(movieFileOutput))
            {
                CaptureSession.BeginConfiguration();
                CaptureSession.AddOutput(movieFileOutput);
                CaptureSession.AddOutput(photoFileOutput);
                var ranges = device.ActiveFormat.VideoSupportedFrameRateRanges;
                if (device.LockForConfiguration(out error))
                {
                    device.ActiveVideoMinFrameDuration = new CMTime(1, (int)ranges.First().MinFrameRate);
                    device.ActiveVideoMaxFrameDuration = new CMTime(1, (int)ranges.First().MaxFrameRate);
                    device.UnlockForConfiguration();
                }

                var connection = movieFileOutput.ConnectionFromMediaType(AVMediaType.Video);
                if (connection != null)
                {
                    if (connection.SupportsVideoStabilization)
                    {
                        connection.PreferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.Auto;
                    }
                }
                CaptureSession.CommitConfiguration();
            }

            // Guard against an invalid input (e.g. when running in the simulator) before adding it.
            if (CaptureSession.CanAddInput(input))
            {
                CaptureSession.AddInput(input);
            }
            Layer.AddSublayer(previewLayer);
            CaptureSession.StartRunning();
            // set frame rate if Slow-mo is requested
            if (speedOptions == SpeedOptions.SlowMo)
            {
                foreach (var vFormat in device.Formats)
                {
                    var _ranges    = vFormat.VideoSupportedFrameRateRanges as AVFrameRateRange[];
                    var frameRates = _ranges[0];

                    if (frameRates.MaxFrameRate >= 240.0)
                    {
                        device.LockForConfiguration(out NSError _error);
                        if (_error is null)
                        {
                            device.ActiveFormat = vFormat as AVCaptureDeviceFormat;
                            device.ActiveVideoMinFrameDuration = frameRates.MinFrameDuration;
                            device.ActiveVideoMaxFrameDuration = frameRates.MaxFrameDuration;
                            device.UnlockForConfiguration();
                            break;
                        }
                    }
                }
            }


            IsPreviewing = true;
        }
Example #30
0
        public bool SetupCapture()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            captureSession = new AVCaptureSession()
            {
                //SessionPreset = AVCaptureSession.PresetPhoto
                SessionPreset = AVCaptureSession.Preset1280x720
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            if (captureDeviceInput == null)
            {
                Console.WriteLine("No video input device");
                return(false);
            }

            if (captureSession.CanAddInput(captureDeviceInput))
            {
                captureSession.AddInput(captureDeviceInput);
            }
            else
            {
                Console.WriteLine("Could not add input capture device to AVACaptureSession");
                return(false);
            }


            // create a VideoDataOutput and add it to the session
            AVCaptureVideoDataOutput output = new AVCaptureVideoDataOutput
            {
                AlwaysDiscardsLateVideoFrames = false, // true,
                WeakVideoSettings             = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV24RGB
                }.Dictionary //,

                // If you want to cap the frame rate at a given speed, in this sample: 30 frames per second
                //MinFrameDuration = new CMTime(1, 30)
            };


            CoreFoundation.DispatchQueue videoCaptureQueue = new CoreFoundation.DispatchQueue("Video Capture Queue");
            output.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

            if (captureSession.CanAddOutput(output))
            {
                captureSession.AddOutput(output);
            }
            else
            {
                return(false);
            }

            // add preview layer to this view controller's NSView
            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(captureSession);

            previewLayer.Frame        = this.View.Bounds;
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;

            if (this.View.Layer == null)
            {
                this.View.WantsLayer = true;
                this.View.Layer      = previewLayer;
            }
            else
            {
                this.View.WantsLayer = true;
                this.View.Layer.AddSublayer(previewLayer);
            }

            captureSession.StartRunning();

            return(true);
        }
Example #31
0
        private void BeginSession()
        {
            try
            {
                NSError error       = null;
                var     deviceInput = new AVCaptureDeviceInput(captureDevice, out error);
                if (error == null && captureSession.CanAddInput(deviceInput))
                {
                    captureSession.AddInput(deviceInput);
                }
                previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
                {
                    VideoGravity = AVLayerVideoGravity.ResizeAspect
                };
                //this.HomeView.BackgroundColor = UIColor.Black;
                previewLayer.Frame = this.HomeView.Layer.Bounds;

                this.HomeView.Layer.AddSublayer(previewLayer);

                captureDevice.LockForConfiguration(out error);
                if (error != null)
                {
                    Console.WriteLine(error);
                    captureDevice.UnlockForConfiguration();
                    return;
                }

                if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
                {
                    captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
                }
                captureDevice.UnlockForConfiguration();

                captureSession.StartRunning();

                // create a VideoDataOutput and add it to the session
                videoOut = new AVCaptureVideoDataOutput()
                {
                    AlwaysDiscardsLateVideoFrames = true,
                    WeakVideoSettings             = new CVPixelBufferAttributes()
                    {
                        PixelFormatType = CVPixelFormatType.CV32BGRA
                    }.Dictionary
                };

                if (captureSession.CanAddOutput(videoOut))
                {
                    captureSession.AddOutput(videoOut);
                }



                captureSession.CommitConfiguration();

                setupAVFoundationFaceDetection();

                //var OutputSampleDelegate = new VideoCapture(
                //(s) =>
                //{
                //    GreetingsLabel.Text = s;
                //    PopulateList(s);
                //}, new Action<CIImage, CGRect>(DrawFaces));

                //videoOut.SetSampleBufferDelegateQueue(OutputSampleDelegate, sessionQueue);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
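setupAVFoundationFaceDetection is invoked above but not shown. A hedged sketch (assuming the controller implements IAVCaptureMetadataOutputObjectsDelegate) that attaches a metadata output restricted to face detection:

        AVCaptureMetadataOutput metadataOutput;

        void setupAVFoundationFaceDetection()
        {
            metadataOutput = new AVCaptureMetadataOutput();
            if (!captureSession.CanAddOutput(metadataOutput))
                return;

            captureSession.AddOutput(metadataOutput);

            // Face metadata only becomes available after the output has been added to the session.
            metadataOutput.MetadataObjectTypes = AVMetadataObjectType.Face;
            metadataOutput.SetDelegate(this, DispatchQueue.MainQueue);
        }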
Example #32
0
        void ConfigureSession()
        {
            if (setupResult != AVCamSetupResult.Success)
            {
                return;
            }

            session.BeginConfiguration();

            // We do not create an AVCaptureMovieFileOutput when setting up the session because the
            // AVCaptureMovieFileOutput does not support movie recording with AVCaptureSessionPresetPhoto.
            session.SessionPreset = AVCaptureSession.PresetPhoto;

            // Add video input.
            // Choose the back dual camera if available, otherwise default to a wide angle camera.
            var defaultVideoDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDuoCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
                                     ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Back)
                                     ?? AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInWideAngleCamera, AVMediaType.Video, AVCaptureDevicePosition.Front);

            NSError error;
            var     input = AVCaptureDeviceInput.FromDevice(defaultVideoDevice, out error);

            if (error != null)
            {
                Console.WriteLine($"Could not create video device input: {error.LocalizedDescription}");
                setupResult = AVCamSetupResult.SessionConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            if (session.CanAddInput(input))
            {
                session.AddInput(input);
                videoDeviceInput = input;

                DispatchQueue.MainQueue.DispatchAsync(() => {
                    // Why are we dispatching this to the main queue?
                    // Because AVCaptureVideoPreviewLayer is the backing layer for PreviewView and UIView
                    // can only be manipulated on the main thread.
                    // Note: As an exception to the above rule, it is not necessary to serialize video orientation changes
                    // on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
                    // Use the status bar orientation as the initial video orientation. Subsequent orientation changes are handled by
                    // ViewWillTransitionToSize method.
                    var statusBarOrientation    = UIApplication.SharedApplication.StatusBarOrientation;
                    var initialVideoOrientation = AVCaptureVideoOrientation.Portrait;
                    AVCaptureVideoOrientation videoOrientation;
                    if (statusBarOrientation != UIInterfaceOrientation.Unknown && TryConvertToVideoOrientation(statusBarOrientation, out videoOrientation))
                    {
                        initialVideoOrientation = videoOrientation;
                    }

                    PreviewView.VideoPreviewLayer.Connection.VideoOrientation = initialVideoOrientation;
                });
            }
            else
            {
                Console.WriteLine("Could not add video device input to the session");
                setupResult = AVCamSetupResult.SessionConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            // Add audio input.
            //var audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
            var audioDevice      = AVCaptureDevice.GetDefaultDevice(AVMediaType.Audio);
            var audioDeviceInput = AVCaptureDeviceInput.FromDevice(audioDevice, out error);

            if (error != null)
            {
                Console.WriteLine($"Could not create audio device input: {error.LocalizedDescription}");
            }
            if (session.CanAddInput(audioDeviceInput))
            {
                session.AddInput(audioDeviceInput);
            }
            else
            {
                Console.WriteLine("Could not add audio device input to the session");
            }

            // Add photo output.
            if (session.CanAddOutput(photoOutput))
            {
                session.AddOutput(photoOutput);
                photoOutput.IsHighResolutionCaptureEnabled = true;
                photoOutput.IsLivePhotoCaptureEnabled      = photoOutput.IsLivePhotoCaptureSupported;
                livePhotoMode = photoOutput.IsLivePhotoCaptureSupported ? LivePhotoMode.On : LivePhotoMode.Off;
            }
            else
            {
                Console.WriteLine("Could not add photo output to the session");
                setupResult = AVCamSetupResult.SessionConfigurationFailed;
                session.CommitConfiguration();
                return;
            }
            session.CommitConfiguration();
        }
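A hedged sketch of actually triggering a capture with the photoOutput configured above (the sessionQueue field, the CapturePhoto method, and the delegate class name are assumptions, not part of the original ConfigureSession):

        void CapturePhoto()
        {
            sessionQueue.DispatchAsync(() =>
            {
                var photoSettings = AVCapturePhotoSettings.Create();
                photoSettings.IsHighResolutionPhotoEnabled = true;

                photoOutput.CapturePhoto(photoSettings, new PhotoCaptureDelegate());
            });
        }

        class PhotoCaptureDelegate : AVCapturePhotoCaptureDelegate
        {
            public override void DidFinishProcessingPhoto(AVCapturePhotoOutput output, AVCapturePhoto photo, NSError error)
            {
                if (error != null)
                {
                    Console.WriteLine($"Error capturing photo: {error.LocalizedDescription}");
                    return;
                }

                // photo.FileDataRepresentation holds the encoded image data (JPEG/HEIF), ready to save.
            }
        }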
Example #33
0
        bool SetupCaptureSession()
        {
            //Console.WriteLine ("SetupCaptureSession");
            // Overview: RosyWriter uses separate GCD queues for audio and video capture.  If a single GCD queue
            // is used to deliver both audio and video buffers, and our video processing consistently takes
            // too long, the delivery queue can back up, resulting in audio being dropped.
            //
            // When recording, RosyWriter creates a third GCD queue for calls to AVAssetWriter.  This ensures
            // that AVAssetWriter is not called to start or finish writing from multiple threads simultaneously.
            //
            // RosyWriter uses AVCaptureSession's default preset, AVCaptureSessionPresetHigh.

            // Create Capture session
            captureSession = new AVCaptureSession();
            captureSession.BeginConfiguration();

            // Create audio connection
            NSError error;
            var     audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);

            if (audioDevice == null)
            {
                return(false);                // e.g. simulator
            }
            var audioIn = new AVCaptureDeviceInput(audioDevice, out error);

            if (captureSession.CanAddInput(audioIn))
            {
                captureSession.AddInput(audioIn);
            }

            var audioOut          = new AVCaptureAudioDataOutput();
            var audioCaptureQueue = new DispatchQueue("Audio Capture Queue");

            // Add the Delegate to capture each sample that comes through
            audioOut.SetSampleBufferDelegateQueue(this, audioCaptureQueue);

            if (captureSession.CanAddOutput(audioOut))
            {
                captureSession.AddOutput(audioOut);
            }

            audioConnection = audioOut.ConnectionFromMediaType(AVMediaType.Audio);

            // Create Video Session
            var videoDevice = VideoDeviceWithPosition(AVCaptureDevicePosition.Back);
            var videoIn     = new AVCaptureDeviceInput(videoDevice, out error);

            if (captureSession.CanAddInput(videoIn))
            {
                captureSession.AddInput(videoIn);
            }

            // RosyWriter prefers to discard late video frames early in the capture pipeline, since its
            // processing can take longer than real-time on some platforms (such as iPhone 3GS).
            // Clients whose image processing is faster than real-time should consider setting AVCaptureVideoDataOutput's
            // alwaysDiscardsLateVideoFrames property to NO.
            var videoOut = new AVCaptureVideoDataOutput {
                AlwaysDiscardsLateVideoFrames = true,
                // HACK: Change VideoSettings to WeakVideoSettings, and AVVideoSettings to CVPixelBufferAttributes
                // VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
                WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary
            };

            // Create a DispatchQueue for the Video Processing
            var videoCaptureQueue = new DispatchQueue("Video Capture Queue");

            videoOut.SetSampleBufferDelegateQueue(this, videoCaptureQueue);

            if (captureSession.CanAddOutput(videoOut))
            {
                captureSession.AddOutput(videoOut);
            }

            // Set the Video connection from the Video Output object
            videoConnection  = videoOut.ConnectionFromMediaType(AVMediaType.Video);
            videoOrientation = videoConnection.VideoOrientation;

            captureSession.CommitConfiguration();

            return(true);
        }
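The comment at the top of SetupCaptureSession mentions a third GCD queue for AVAssetWriter calls; that code is not part of the snippet. A hedged sketch of the idea (names assumed, not the original RosyWriter source):

        DispatchQueue movieWritingQueue = new DispatchQueue("Movie Writing Queue");

        void StartWriting(AVAssetWriter assetWriter, CMTime firstSampleTime)
        {
            // Funnel every AVAssetWriter call through one queue so start/finish writing
            // never run concurrently from the audio and video capture queues.
            movieWritingQueue.DispatchAsync(() =>
            {
                if (assetWriter.Status == AVAssetWriterStatus.Unknown && assetWriter.StartWriting())
                    assetWriter.StartSessionAtSourceTime(firstSampleTime);
            });
        }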
        private void InitCamera()
        {
            //ADD DEVICE INPUTS
            try
            {
                //If no camera available, return
                if (!IsCameraAvailable)
                {
                    return;
                }

                //Set up a new AV capture session
                session = new AVCaptureSession();                 //Set up a new session

                //add video capture device
                var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
                AVCaptureDevicePosition cameraPosition = (CameraOption == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
                var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);                 //Get the first device where the camera matches the requested camera

                if (device == null)
                {
                    //use the default camera if front isn't available
                    device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
                }

                if (device == null)
                {
                    return;                     //No device available
                }

                input = AVCaptureDeviceInput.FromDevice(device);
                session.AddInput(input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput  = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);
            }
            catch (Exception ex)
            {
                Console.WriteLine("InitCamera failed: " + ex.Message);
                return;
            }

            //Set up preview layer (shows what the input device sees)
            previewlayer       = new AVCaptureVideoPreviewLayer(session);
            previewlayer.Frame = Bounds;


            if (OrientationOption == OrientationOptions.Landscape)
            {
                //landscape
                previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.LandscapeRight;                 //Video is recorded upside down but oriented correctly for right-handed people
                //previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait; //VIdeo recorded portrait, face to left
                //previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.LandscapeLeft;
            }
            else
            {
                //portrait
                previewlayer.Connection.VideoOrientation = AVCaptureVideoOrientation.Portrait;
            }

            output = new AVCaptureMovieFileOutput();
            long   totalSeconds       = 10000;
            Int32  preferredTimeScale = 30;
            // CMTime is value/timescale: 10000 / 30 ≈ 333 seconds maximum recording time.
            CMTime maxDuration        = new CMTime(totalSeconds, preferredTimeScale);

            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration   = maxDuration;

            if (session.CanAddOutput(output))
            {
                session.AddOutput(output);
            }

            //Resolutions available @ http://stackoverflow.com/questions/19422322/method-to-find-devices-camera-resolution-ios
            session.SessionPreset = AVCaptureSession.PresetHigh;             //Widescreen (Medium is 4:3)
            Layer.AddSublayer(previewlayer);
            //session.StartRunning(); //Moved this to StartPreviewing
        }