/******************************* MAIN FUNCTIONS *******************************/
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			CaptureSession = new AVCaptureSession();
			CaptureSession.AddInput (input as AVCaptureInput);

			var captureMetadataOutput = new AVCaptureMetadataOutput();
			metadataDelegate = new MetadataObjectsDelegate();
			metadataDelegate.outer = this;
			captureMetadataOutput.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
			CaptureSession.AddOutput(captureMetadataOutput);
			captureMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

			VideoPreviewLayer = new AVCaptureVideoPreviewLayer (CaptureSession);
			VideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			VideoPreviewLayer.Frame = View.Layer.Bounds;
			View.Layer.AddSublayer (VideoPreviewLayer);

			View.BringSubviewToFront (messageLabel);

			QRCodeFrameView = new UIView ();
			QRCodeFrameView.Layer.BorderColor = UIColor.Green.CGColor;
			QRCodeFrameView.Layer.BorderWidth = 2;
			View.AddSubview (QRCodeFrameView);
			View.BringSubviewToFront (QRCodeFrameView);

			CaptureSession.StartRunning();

			cancelButton.Clicked += (sender, e) => {
				this.DismissViewController (true, null);
			};
		}
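Example #1 assumes that the default camera and its input always exist. Several of the later examples add a null guard for the no-camera case (for example the simulator); a minimal sketch of that guard for this snippet, reusing its existing messageLabel field, would be:

			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (captureDevice == null) {
				// no camera available (e.g. running on the simulator)
				messageLabel.Text = "No camera available";
				return;
			}
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null) {
				messageLabel.Text = "Unable to open the camera input";
				return;
			}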
Example #2
        void SetupLiveCameraStream()
        {
            _captureSession = new AVCaptureSession();

            var viewLayer = _liveCameraStream.Layer;

            _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
            {
                Frame = _liveCameraStream.Bounds
            };
            _liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            //var dictionary = new NSMutableDictionary();
            //dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);


            _stillImageOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };

            _captureSession.AddOutput(_stillImageOutput);
            _captureSession.AddInput(_captureDeviceInput);
            _captureSession.StartRunning();
        }
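If JPEG stills are actually wanted here, the commented-out codec dictionary would be assigned to OutputSettings instead of leaving it as an empty NSDictionary. A sketch of that wiring, using the same key/value pair that the bracketed-capture example further down uses:

            _stillImageOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG)
            };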
Example #3
        void setupAVCapture()
        {
            session = new AVCaptureSession();
            session.SessionPreset = AVCaptureSession.PresetHigh;
            previewView.Session   = session;

            updateCameraSelection();
            CALayer rootLayer = previewView.Layer;

            rootLayer.MasksToBounds = true;
            (previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
            previewView.Layer.BackgroundColor = UIColor.Black.CGColor;

            setupAVFoundationFaceDetection();

            if (device != null)
            {
                device.AddObserver(this, (NSString)"videoZoomFactor", (NSKeyValueObservingOptions)0,
                                   VideoZoomFactorContext);
                device.AddObserver(this, (NSString)"rampingVideoZoom", (NSKeyValueObservingOptions)0,
                                   VideoZoomRampingContext);
            }

            session.StartRunning();
        }
Example #4
        public void SetupLiveCameraStream(object cameraView)
        {
            _cameraView    = (UIView)cameraView;
            captureSession = new AVCaptureSession();
            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame        = _cameraView.Frame,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };

            _cameraView.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            captureSession.AddInput(captureDeviceInput);

            var dictionary = new NSMutableDictionary();

            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            stillImageOutput             = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.StartRunning();
        }
Example #5
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            //var viewLayer = liveCameraStream.Layer;
            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };

            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

            // HACK: GetDefaultDevice occasionally returns null (most commonly when no camera is available, e.g. on the simulator), so bail out rather than crash
            if (captureDevice is null)
            {
                return;
            }
            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            //var dictionary = new NSMutableDictionary
            //{
            //    [AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG)
            //};
            stillImageOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();
        }
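One possible workaround for the null device flagged by the HACK above is to fall back to enumerating the video devices, the same DevicesWithMediaType call other examples on this page use (a sketch, assuming System.Linq is imported):

            if (captureDevice is null)
            {
                // assumption: fall back to the first enumerated video device instead of giving up
                captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video).FirstOrDefault();
                if (captureDevice is null)
                {
                    return;
                }
            }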
Example #6
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            var viewLayer = liveCameraStream.Layer;

            Console.WriteLine(viewLayer.Frame.Width);

            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };
            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            Console.WriteLine(liveCameraStream.Layer.Frame.Width);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            var dictionary = new NSMutableDictionary();
            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            stillImageOutput = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();

            ViewWillLayoutSubviews();
        }
Example #7
		void setupAVCapture ()
		{
			session = new AVCaptureSession ();
			session.SessionPreset = AVCaptureSession.PresetHigh;
			previewView.Session = session;

			updateCameraSelection ();
			CALayer rootLayer = previewView.Layer;
			rootLayer.MasksToBounds = true;
			// HACK: Remove .ToString() for AVLayerVideoGravity
			// (previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
			(previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			previewView.Layer.BackgroundColor = UIColor.Black.CGColor;

			setupAVFoundationFaceDetection ();

			if (device != null) {
				device.AddObserver (this, (NSString) "videoZoomFactor", (NSKeyValueObservingOptions)0,
				                    VideoZoomFactorContext);
				device.AddObserver (this, (NSString) "rampingVideoZoom", (NSKeyValueObservingOptions)0,
				                    VideoZoomRampingContext);
			}

			session.StartRunning ();
		}
Example #8
        public void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            //	var viewLayer = liveCameraStream.Layer;
            //	videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            //	{
            //		Frame = this.View.Frame
            //	};
            //	liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            captureSession.AddInput(captureDeviceInput);

            var dictionary = new NSMutableDictionary();

            dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
            stillImageOutput             = new AVCaptureStillImageOutput()
            {
                OutputSettings = new NSDictionary()
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.StartRunning();
        }
Example #9
        public void RecordVideoToPath(UIViewController ViewController, string VideoPath)
        {
            // setup capture device
              AVCaptureDevice videoRecordingDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
              NSError error;
              AVCaptureDeviceInput videoInput = new AVCaptureDeviceInput(videoRecordingDevice, out error);

              // create and assign a capture session
              AVCaptureSession captureSession = new AVCaptureSession();
              captureSession.SessionPreset = AVCaptureSession.Preset1280x720;
              captureSession.AddInput(videoInput);

              // Create capture device output
              AVCaptureVideoDataOutput videoOutput = new AVCaptureVideoDataOutput();
              captureSession.AddOutput(videoOutput);
              videoOutput.VideoSettings.PixelFormat = CVPixelFormatType.CV32BGRA;
              videoOutput.MinFrameDuration = new CMTime(1, 30);

              // create the delegate class for handling capture before wiring it to the output,
              // and hand the output a real dispatch queue rather than IntPtr.Zero
              captureVideoDelegate = new CaptureVideoDelegate(ViewController);
              videoOutput.SetSampleBufferDelegatequeue(captureVideoDelegate, DispatchQueue.MainQueue);

              // Start capture session
              captureSession.StartRunning();
        }
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession () {
				SessionPreset = AVCaptureSession.PresetMedium
			};

			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null){
				// No input device
				return false;
			}
			session.AddInput (input);

			// create a VideoDataOutput and add it to the session
			var output = new AVCaptureVideoDataOutput () {
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA)
			};

			// configure the output
			queue = new DispatchQueue ("myQueue");
			qrScanner = new QrScanner (this);
			output.SetSampleBufferDelegateAndQueue (qrScanner, queue);
			session.AddOutput (output);

			previewLayer = new AVCaptureVideoPreviewLayer (session);
			previewLayer.Orientation = AVCaptureVideoOrientation.Portrait;
			previewLayer.VideoGravity = "AVLayerVideoGravityResizeAspectFill";

			session.StartRunning ();
			return true;
		}
Example #11
        /// <summary>
        /// Initializes this instance.
        /// </summary>
        private void Initialize()
        {
            var captureSession = new AVCaptureSession();

            _previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill,
                Frame             = Bounds
            };

            var device = AVCaptureDevice.DefaultDeviceWithMediaType(
                AVMediaType.Video);

            if (device == null)
            {
                System.Diagnostics.Debug.WriteLine("No device detected.");
                return;
            }

            NSError error;

            var input = new AVCaptureDeviceInput(device, out error);

            captureSession.AddInput(input);

            Layer.AddSublayer(_previewLayer);

            captureSession.StartRunning();
        }
Example #12
        private void SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();

            var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = liveCameraStream.Bounds
            };

            liveCameraStream.Layer.AddSublayer(videoPreviewLayer);

            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            stillImageOutput = new AVCaptureStillImageOutput
            {
                OutputSettings = new NSDictionary()
            };

            captureSession.AddOutput(stillImageOutput);
            captureSession.AddInput(captureDeviceInput);
            captureSession.StartRunning();
        }
Example #13
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession () {
				SessionPreset = AVCaptureSession.PresetMedium
			};
			
			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null){
				Console.WriteLine ("No input device");
				return false;
			}
			session.AddInput (input);
			
			// create a VideoDataOutput and add it to the session
			var output = new AVCaptureVideoDataOutput () {
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
				
				// If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
				MinFrameDuration = new CMTime (1, 15)
			};
			
			// configure the output
			queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
			outputRecorder = new OutputRecorder ();
			output.SetSampleBufferDelegateAndQueue (outputRecorder, queue);
			session.AddOutput (output);
			
			session.StartRunning ();
			return true;
		}
Example #14
        void Initialize()
        {
            CaptureSession = new AVCaptureSession();
            previewLayer   = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = Bounds,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill
            };

            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device         = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            CaptureSession.AddInput(input);
            Layer.AddSublayer(previewLayer);
            CaptureSession.StartRunning();
            IsPreviewing = true;
        }
Example #15
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                return(false);
            }
            //Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
            NSError error = null;

            captureDevice.LockForConfiguration(out error);
            if (error != null)
            {
                Console.WriteLine(error);
                captureDevice.UnlockForConfiguration();
                return(false);
            }
            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
            {
                captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15);
            }
            captureDevice.UnlockForConfiguration();

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                return(false);
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            var output = new AVCaptureVideoDataOutput()
            {
                WeakVideoSettings = new CVPixelBufferAttributes()
                {
                    PixelFormatType = CVPixelFormatType.CV32BGRA
                }.Dictionary,
            };

            // configure the output
            queue          = new CoreFoundation.DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder();
            output.SetSampleBufferDelegate(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
            return(true);
        }
Example #16
 public void StartPreview()
 {
     if (!captureSession.Running)
     {
         captureSession.StartRunning();
     }
     IsPreviewing = true;
 }
Example #17
 public void ResumeCapture()
 {
     if (!isCapturing)
     {
         session?.StartRunning();
         isCapturing = true;
     }
 }
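A symmetric counterpart to ResumeCapture, a sketch only and assuming the same session and isCapturing fields, would stop the session again:

 public void PauseCapture()
 {
     if (isCapturing)
     {
         session?.StopRunning();
         isCapturing = false;
     }
 }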
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                Image <Bgr, Byte> img = new Image <Bgr, byte> (512, 512, new Bgr(255, 255, 255));
                CvInvoke.PutText(
                    img,
                    "Capture device not found.",
                    new Point(10, 200),
                    FontFace.HersheyComplex,
                    1,
                    new MCvScalar(),
                    2);
                ImageView.Image = img.ToUIImage();
                return(false);
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input device");
                return(false);
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();

            settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
            var output = new AVCaptureVideoDataOutput()
            {
                UncompressedVideoSetting = settingUncomp,

                // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
                //MinFrameDuration = new CMTime (1, 15)
            };


            // configure the output
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
            return(true);
        }
Example #19
 public override void ViewDidAppear(bool animated)
 {
     if (_captureSession == null)
     {
         AuthorizeCameraUse();
     }
     else if (!_captureSession.Running)
     {
         _captureSession.StartRunning();
     }
 }
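AuthorizeCameraUse is referenced above but not shown. A minimal sketch of such a helper, using the standard authorization API (the method and field names are assumptions, not the original code):

 async void AuthorizeCameraUse()
 {
     var status = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
     if (status != AVAuthorizationStatus.Authorized)
     {
         // prompts the user; on iOS 10+ this also requires NSCameraUsageDescription in Info.plist
         var granted = await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
         if (!granted)
         {
             return;
         }
     }

     // assumed setup method, as in the other examples here; it creates _captureSession and starts it
     SetupLiveCameraStream();
 }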
        public void StartPreviewing(object sender, EventArgs e)
        {
            if (XamRecorder.IsPreviewing)
            {
                throw new Exception("Preview has already started.");
            }

            session.StartRunning();

            XamRecorder.IsPreviewing = true;
        }
Example #21
        public void ConfigureDeviceAndStart()
        {
            var device = GetDevice();

            if (device == null)
            {
                return;
            }

            try
            {
                if (device.LockForConfiguration(out var error))
                {
                    if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                    {
                        device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    }

                    device.UnlockForConfiguration();
                }

                // Configure Input
                var input = AVCaptureDeviceInput.FromDevice(device, out var error2);
                _captureSession.AddInput(input);

                // Configure Output
                var settings = new AVVideoSettingsUncompressed()
                {
                    PixelFormatType = CoreVideo.CVPixelFormatType.CV32BGRA
                };

                var videoOutput = new AVCaptureVideoDataOutput
                {
                    WeakVideoSettings             = settings.Dictionary,
                    AlwaysDiscardsLateVideoFrames = true
                };

                var videoCaptureQueue = new DispatchQueue("Video Queue");
                videoOutput.SetSampleBufferDelegateQueue(new OutputRecorder(View, _shapeLayer), videoCaptureQueue);

                if (_captureSession.CanAddOutput(videoOutput))
                {
                    _captureSession.AddOutput(videoOutput);
                }

                // Start session
                _captureSession.StartRunning();
            }
            catch (Exception e)
            {
                Console.Write(e);
            }
        }
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession();

            AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                Console.WriteLine("No video camera (in simulator?)");
                return(false);                // simulator?
            }

            NSError error = null;

            AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error);

            if (input == null)
            {
                Console.WriteLine("Error: " + error);
            }
            else
            {
                session.AddInput(input);
            }

            AVCaptureMetadataOutput output = new AVCaptureMetadataOutput();

            var dg = new CaptureDelegate(this);

            output.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
            session.AddOutput(output);

            // This could be any list of supported barcode types
            output.MetadataObjectTypes = new NSString[] { AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode };
            // OR you could just accept "all" with the following line;
//			output.MetadataObjectTypes = output.AvailableMetadataObjectTypes;  // empty
            // DEBUG: use this if you're curious about the available types
//			foreach (var t in output.AvailableMetadataObjectTypes)
//				Console.WriteLine(t);


            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session);

            //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height);
            previewLayer.Frame        = new RectangleF(0, 0, 320, 290);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
            View.Layer.AddSublayer(previewLayer);

            session.StartRunning();

            Console.WriteLine("StartRunning");
            return(true);
        }
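The CaptureDelegate used above is not shown. A sketch of what such a metadata delegate could look like (the owner type and the logging behaviour are assumptions):

        class CaptureDelegate : AVCaptureMetadataOutputObjectsDelegate
        {
            readonly UIViewController parent;   // assumed owner type

            public CaptureDelegate(UIViewController parent)
            {
                this.parent = parent;
            }

            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput,
                AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
            {
                foreach (var metadata in metadataObjects)
                {
                    var readable = metadata as AVMetadataMachineReadableCodeObject;
                    if (readable != null)
                    {
                        Console.WriteLine(readable.StringValue);
                    }
                }
            }
        }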
Example #23
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            OCR = new TesseractApi();

            CaptureSession = new AVCaptureSession();
            ImageOutput    = new AVCapturePhotoOutput();

            var cameraDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            var cameraInput  = AVCaptureDeviceInput.FromDevice(cameraDevice);

            CaptureSession.AddInput(cameraInput);
            CaptureSession.AddOutput(ImageOutput);

            SetupUI();
            CaptureSession.StartRunning();

            Camera = new CameraHandler();
            Camera.FinishedProcessing += async delegate
            {
                PictureView.Image = new UIImage(Camera.Picture, 1f, UIImageOrientation.Right);
                Capture           = PictureView.Capture();
                await InitReader();
            };

            OCRButton.TouchUpInside += async delegate
            {
                HandleButtonClick();
            };

            AlphaNumericSwitch.ValueChanged += async delegate
            {
                await SetOcrTextLabel();
            };

            // Selection slider Setup
            SelectionBarSlider.TouchUpInside += async delegate
            {
                await InitReader();
            };

            SelectionBarSlider.TouchUpOutside += async delegate
            {
                await InitReader();
            };

            SelectionBarSlider.ValueChanged += delegate
            {
                var tempFrame = SelectionBarView.Frame;
                tempFrame.Y            = (SelectionBarSlider.Value * 92) + 22;
                SelectionBarView.Frame = tempFrame;
            };
        }
 public override void ViewDidAppear(bool animated)
 {
     _orientationChangeEventToken = NSNotificationCenter.DefaultCenter.AddObserver(UIDevice.OrientationDidChangeNotification, CheckDeviceOrientation);
     if (_captureSession == null)
     {
         AuthorizeCameraUse();
     }
     else if (!_captureSession.Running)
     {
         _captureSession.StartRunning();
     }
 }
Example #25
        private void StartCameraPreview()
        {
            captureSession = new AVCaptureSession();
            captureSession.AddInput(AVCaptureDeviceInput.FromDevice(MediaDevices.BackCamera));

            captureLayer       = new AVCaptureVideoPreviewLayer(captureSession);
            captureLayer.Frame = captureView.Bounds;

            captureView.Layer.AddSublayer(captureLayer);

            captureSession.StartRunning();
        }
        void SetupAVCapture(NSString sessionPreset)
        {
            if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext(context)) == null)
            {
                Console.WriteLine("Could not create the CoreVideo TextureCache");
                return;
            }
            session = new AVCaptureSession();
            session.BeginConfiguration();

            // Preset size
            session.SessionPreset = sessionPreset;

            // Input device
            var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (videoDevice == null)
            {
                Console.WriteLine("No video device");
                return;
            }
            NSError err;
            var     input = new AVCaptureDeviceInput(videoDevice, out err);

            if (err != null)
            {
                Console.WriteLine("Error creating video capture device");
                return;
            }
            session.AddInput(input);

            // Create the output device
            var dataOutput = new AVCaptureVideoDataOutput()
            {
                AlwaysDiscardsLateVideoFrames = true,

                // YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of
                // memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
                VideoSettings = new AVVideoSettings(CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
            };

            dataOutputDelegate = new DataOutputDelegate(this);

            //
            // This dispatches the video frames into the main thread, because the OpenGL
            // code is accessing the data synchronously.
            //
            dataOutput.SetSampleBufferDelegateAndQueue(dataOutputDelegate, DispatchQueue.MainQueue);
            session.AddOutput(dataOutput);
            session.CommitConfiguration();
            session.StartRunning();
        }
Example #27
        /// <summary>
        /// Initializes this instance.
        /// </summary>
        private void Initialize()
        {
            var captureSession = new AVCaptureSession();

            _previewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,

                Frame = Bounds
            };
            // create a device input and attach it to the session
            AVCaptureDevice frontCameraDevice = MediaDevices.FrontCamera;

            if (frontCameraDevice == null)
            {
                LoadErrors = "Cannot load camera";
                return;
            }

            try
            {
                NSError error = null;
                frontCameraDevice.LockForConfiguration(out error);
                if (error != null)
                {
                    Debug.WriteLine(error);
                    //throw new BusinessLogicException (ErrorCode.EC_UNDEFINED_ERROR, error.ToString());
                    //TODO: show error to user
                }
                frontCameraDevice.ActiveVideoMinFrameDuration = new CMTime(1, 15); // Configure for 15 FPS
            }
            finally
            {
                frontCameraDevice.UnlockForConfiguration();
            }

            AVCaptureDeviceInput inputDevice = AVCaptureDeviceInput.FromDevice(frontCameraDevice);

            if (inputDevice == null)
            {
                LoadErrors = "Cannot access front camera";
                return;
            }

            captureSession.AddInput(inputDevice);

            NSError errorOut;

            Layer.AddSublayer(_previewLayer);

            captureSession.StartRunning();
        }
Example #28
        public void SetupAndStartCaptureSession()
        {
            //Console.WriteLine ("SetupAndStartCapture Session");

            // Create a shallow queue for buffers going to the display for preview.
            previewBufferQueue = CMBufferQueue.CreateUnsorted(1);

            // Create serial queue for movie writing
            movieWritingQueue = new DispatchQueue("Movie Writing Queue");

            if (captureSession == null)
            {
                SetupCaptureSession();
            }

            NSNotificationCenter.DefaultCenter.AddObserver(AVCaptureSession.DidStopRunningNotification, CaptureSessionStoppedRunningNotification, captureSession);

            if (!captureSession.Running)
            {
                captureSession.StartRunning();
            }
        }
Example #29
        private void SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };



            // create a device input and attach it to the session
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (captureDevice == null)
            {
                RenderImageMessage("Capture device not found.");

                return;
            }
            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                RenderImageMessage("No input device");

                return;
            }
            session.AddInput(input);

            // create a VideoDataOutput and add it to the session
            AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();

            settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
            var output = new AVCaptureVideoDataOutput()
            {
                UncompressedVideoSetting = settingUncomp,

                // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
                //MinFrameDuration = new CMTime (1, 15)
            };


            // configure the output
            queue          = new DispatchQueue("myQueue");
            outputRecorder = new OutputRecorder(ImageView);
            output.SetSampleBufferDelegateQueue(outputRecorder, queue);
            session.AddOutput(output);

            session.StartRunning();
        }
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            var device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

            if (device is null)
            {
                this.ShowAlert("Unable to access the camera", null);
                return;
            }

            var input = AVCaptureDeviceInput.FromDevice(device);

            if (input is null)
            {
                this.ShowAlert("Unable to access the camera", null);
                return;
            }

            session.AddInput(input);
            try
            {
                var output = new AVCaptureMetadataOutput();
                output.SetDelegate(this, DispatchQueue.MainQueue);
                session.AddOutput(output);

                output.MetadataObjectTypes = AVMetadataObjectType.QRCode;
            }
            catch
            {
                return;
            }

            preview = AVCaptureVideoPreviewLayer.FromSession(session);
            if (preview is null)
            {
                this.ShowAlert("Unable to display the scan preview", null);
                return;
            }
            preview.VideoGravity = AVLayerVideoGravity.Resize;
            preview.Frame        = View.Layer.Bounds;
            View.Layer.AddSublayer(preview);

            session.StartRunning();

            codeFrame = new UIView();
            codeFrame.Layer.BorderColor = UIColor.Green.CGColor;
            codeFrame.Layer.BorderWidth = 2;
            View.AddSubview(codeFrame);
            View.BringSubviewToFront(codeFrame);
        }
Example #31
        public override void ViewWillAppear(bool animated)
        {
            base.ViewWillAppear(animated);
            sessionQueue.DispatchAsync(() => {
                switch (setupResult)
                {
                // Only setup observers and start the session running if setup succeeded.
                case AVCamSetupResult.Success:
                    AddObservers();
                    session.StartRunning();
                    sessionRunning = session.Running;
                    break;

                case AVCamSetupResult.CameraNotAuthorized:
                    DispatchQueue.MainQueue.DispatchAsync(() => {
                        var message         = "AVCam doesn't have permission to use the camera, please change privacy settings";
                        var alertController = UIAlertController.Create("AVCam", message, UIAlertControllerStyle.Alert);
                        var cancelAction    = UIAlertAction.Create("OK", UIAlertActionStyle.Cancel, null);
                        alertController.AddAction(cancelAction);
                        // Provide quick access to Settings.
                        alertController.AddAction(UIAlertAction.Create("Settings", UIAlertActionStyle.Default, action => {
                            UIApplication.SharedApplication.OpenUrl(new NSUrl(UIApplication.OpenSettingsUrlString), new NSDictionary(), null);
                        }));
                        PresentViewController(alertController, true, null);
                    });
                    break;

                case AVCamSetupResult.SessionConfigurationFailed:
                    DispatchQueue.MainQueue.DispatchAsync(() => {
                        var message         = "Unable to capture media";
                        var alertController = UIAlertController.Create("AVCam", message, UIAlertControllerStyle.Alert);
                        alertController.AddAction(UIAlertAction.Create("OK", UIAlertActionStyle.Cancel, null));
                        PresentViewController(alertController, true, null);
                    });
                    break;
                }
            });
        }
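AVCam pairs the StartRunning call above with a teardown on the same sessionQueue when the view disappears. A sketch of that counterpart (RemoveObservers is the assumed opposite of the AddObservers call above):

        public override void ViewDidDisappear(bool animated)
        {
            sessionQueue.DispatchAsync(() => {
                if (setupResult == AVCamSetupResult.Success)
                {
                    session.StopRunning();
                    sessionRunning = session.Running;
                    RemoveObservers();
                }
            });

            base.ViewDidDisappear(animated);
        }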
Example #32
        void Initialize()
        {
            Configuration.IsScanning = true;
            SizeChange();
            CaptureSession = new AVCaptureSession();
            CaptureSession.BeginConfiguration();
            this.Frame   = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width);
            previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
            {
                Frame        = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width),
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };
            setPreviewOrientation();
            var videoDevices   = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
            var cameraPosition = AVCaptureDevicePosition.Back;
            //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
            var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

            if (device == null)
            {
                return;
            }

            NSError error;
            var     input = new AVCaptureDeviceInput(device, out error);

            CaptureSession.AddInput(input);
            CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset640x480;
            Layer.AddSublayer(previewLayer);

            CaptureSession.CommitConfiguration();
            CaptureSession.StartRunning();

            VideoDataOutput = new AVCaptureVideoDataOutput();
            VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
            VideoDataOutput.WeakVideoSettings             = new CVPixelBufferAttributes {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            }.Dictionary;


            captureVideoDelegate             = new CaptureVideoDelegate();
            captureVideoDelegate.OnDetected += (list) =>
            {
                this.OnDetected?.Invoke(list);
                CaptureSession.StopRunning();
            };
            VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);

            CaptureSession.AddOutput(VideoDataOutput);
        }
Example #33
        void setupAVCapture()
        {
            session = new AVCaptureSession();
            //AVCaptureSession.Notifications.ObserveRuntimeError((object sender, AVCaptureSessionRuntimeErrorEventArgs e) => {
            //    Console.WriteLine("AVCaptureSessionError"+e.Error);
            //});

            //AVCaptureSession.Notifications.ObserveWasInterrupted((object sender, NSNotificationEventArgs e) => {
            //    Console.WriteLine("Interrupted"+e.Notification);
            //});
            //AVCaptureSession.Notifications.ObserveInterruptionEnded((object sender, NSNotificationEventArgs e) => {
            //    Console.WriteLine("InterruptedEnded" + e.Notification);
            //});
            //AVCaptureSession.Notifications.ObserveDidStopRunning((object sender, NSNotificationEventArgs e) => {
            //    Console.WriteLine("DidStopRunning" + e.Notification);
            //});

            //AVCaptureSession.Notifications.ObserveDidStartRunning((object sender, NSNotificationEventArgs e) => {
            //    Console.WriteLine("DidStartRunning" + e.Notification);
            //});


            session.SessionPreset = AVCaptureSession.PresetHigh;
            previewView.Session   = session;

            updateCameraSelection();
            rootLayer = previewView.Layer;
            rootLayer.MasksToBounds = true;
            // HACK: Remove .ToString() for AVLayerVideoGravity
            // (previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
            (previewView.Layer as AVCaptureVideoPreviewLayer).VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            previewView.Layer.BackgroundColor = UIColor.Black.CGColor;

            setupAVFoundationFaceDetection();
            setUpStilImage();
            //setupVideoOutputCapture();

            if (device != null)
            {
                //

                device.AddObserver(this, (NSString)"videoZoomFactor", (NSKeyValueObservingOptions)0,
                                   VideoZoomFactorContext);
                device.AddObserver(this, (NSString)"rampingVideoZoom", (NSKeyValueObservingOptions)0,
                                   VideoZoomRampingContext);
            }

            session.StartRunning();
        }
        /// <summary>
        /// Creates a camera stream and adds it to the view
        /// </summary>
        private async Task SetupLiveCameraStream()
        {
            captureSession = new AVCaptureSession();


            // SETUP THE PREVIEW OF THE CAPTURE SESSION
            videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
            {
                Frame = this.Frame
            };

            this.Layer.AddSublayer(videoPreviewLayer);


            // SETUP THE INPUT DEVICE FOR THE SESSION
            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

            ConfigureCameraForDevice(captureDevice);
            captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            captureSession.AddInput(captureDeviceInput);


            // VIDEO OUTPUT BUFFERING METHOD
            var output   = new AVCaptureVideoDataOutput();
            var settings = new AVVideoSettingsUncompressed
            {
                PixelFormatType = CVPixelFormatType.CV32BGRA
            };

            output.WeakVideoSettings = settings.Dictionary;

            Queue    = new DispatchQueue("ManCamQueue");
            Recorder = new OutputRecorder(this, VideoFrameInterval);
            Recorder.OnFrameRecieved += HandleVideFrameImage;
            output.SetSampleBufferDelegate(Recorder, Queue);
            captureSession.AddOutput(output);

            // UI PREPARATION
            AddTargetOverlay();
            textOutputLabel = new UILabel(new CGRect(targetOverlayView.Frame.Width + 10, 10, 100, 100))
            {
                TextColor = UIColor.White,
                Font      = UIFont.BoldSystemFontOfSize(22)
            };

            this.AddSubview(textOutputLabel);

            captureSession.StartRunning();
        }
Example #35
        //-----------------------------------------------------------------------------------------------------------------------------------------------
        public override void ViewWillAppear(bool animated)
        {
            base.ViewWillAppear(animated);

            switch (setupResult)
            {
            case AVCamSetupResult.Success:
                session.StartRunning();
                break;
            }

            Application.canRotate = true;

            SetLayout(CGSize.Empty);
        }
Example #36
        private void SetupLiveCameraStream()
        {
            _captureSession = new AVCaptureSession();

            AVCaptureDevice captureDevice;

            captureDevice = AVCaptureDevice.GetDefaultDevice(AVCaptureDeviceType.BuiltInDualCamera, AVMediaType.Video, AVCaptureDevicePosition.Back);

            if (captureDevice == null)
            {
                captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            }

            ConfigureCameraForDevice(captureDevice);
            _captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
            if (!_captureSession.CanAddInput(_captureDeviceInput))
            {
                return;
            }

            _capturePhotoOutput = new AVCapturePhotoOutput();
            _capturePhotoOutput.IsHighResolutionCaptureEnabled = true;
            _capturePhotoOutput.IsLivePhotoCaptureEnabled      = false;


            if (!_captureSession.CanAddOutput(_capturePhotoOutput))
            {
                return;
            }

            _captureSession.BeginConfiguration();

            _captureSession.SessionPreset = AVCaptureSession.PresetPhoto;
            _captureSession.AddInput(_captureDeviceInput);
            _captureSession.AddOutput(_capturePhotoOutput);

            _captureSession.CommitConfiguration();

            _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
            {
                Frame        = liveCameraStream.Frame,
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };

            liveCameraStream.Layer.AddSublayer(_videoPreviewLayer);

            _captureSession.StartRunning();
        }
Example #37
        public void InitAndStartCamera()
        {
            session = new AVCaptureSession
            {
                SessionPreset = AVCaptureSession.PresetMedium
            };
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            NSError error;
            var videoInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error);

            if (videoInput == null || !session.CanAddInput(videoInput)) return;
            session.AddInput(videoInput);
            previewLayer = new AVCaptureVideoPreviewLayer(session) { Frame = rootView.Bounds };
            previewLayer.Connection.VideoOrientation = configDicByRotationChanged[UIApplication.SharedApplication.StatusBarOrientation];
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            cameraView.Layer.AddSublayer(previewLayer);
            session.StartRunning();
        }
        public override void WindowDidLoad()
        {
            base.WindowDidLoad();
            session = new AVCaptureSession() { SessionPreset = AVCaptureSession.PresetMedium };
            var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);
            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                return;
            }
            session.AddInput(input);
            var captureVideoPreviewLayer = new AVCaptureVideoPreviewLayer(session);

            var contentView = Window.ContentView;
            contentView.WantsLayer = true;
            captureVideoPreviewLayer.Frame = contentView.Bounds;
            contentView.Layer.AddSublayer(captureVideoPreviewLayer);

            session.StartRunning();
        }
		void Initialize ()
		{
			CaptureSession = new AVCaptureSession ();
			previewLayer = new AVCaptureVideoPreviewLayer (CaptureSession) {
				Frame = Bounds,
				VideoGravity = AVLayerVideoGravity.ResizeAspectFill
			};

			var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
			var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
			var device = videoDevices.FirstOrDefault (d => d.Position == cameraPosition);

			if (device == null) {
				return;
			}

			NSError error;
			var input = new AVCaptureDeviceInput (device, out error);
			CaptureSession.AddInput (input);
			Layer.AddSublayer (previewLayer);
			CaptureSession.StartRunning ();
			IsPreviewing = true;
		}
Example #40
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			session = new AVCaptureSession ();

			var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice(camera);
			session.AddInput(input);

			output = new AVCaptureMetadataOutput();
			var metadataDelegate = new MetadataOutputDelegate();
			output.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
			session.AddOutput(output);

			output.MetadataObjectTypes = new NSString[] {
				AVMetadataObject.TypeQRCode,
				AVMetadataObject.TypeEAN13Code
			};

			var previewLayer = new AVCaptureVideoPreviewLayer(session);
			//var view = new ContentView(UIColor.LightGray, previewLayer, metadataDelegate);

			previewLayer.MasksToBounds = true;
			previewLayer.VideoGravity = AVCaptureVideoPreviewLayer.GravityResizeAspectFill;
			previewLayer.Frame = UIScreen.MainScreen.Bounds;
			this.View.Layer.AddSublayer(previewLayer);

			metadataDelegate.MetadataFound += (s, e) => {
				session.StopRunning();
				new UIAlertView("Scanned!",e.StringValue, null ,"OK",null).Show();
			};

			session.StartRunning();

		}
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession {
				SessionPreset = AVCaptureSession.PresetMedium
			};

			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (captureDevice == null) {
				Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
				return false;
			}
			//Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
			NSError error = null;
			captureDevice.LockForConfiguration (out error);
			if (error != null) {
				Console.WriteLine (error);
				captureDevice.UnlockForConfiguration ();
				return false;
			}

			if (UIDevice.CurrentDevice.CheckSystemVersion (7, 0))
				captureDevice.ActiveVideoMinFrameDuration = new CMTime (1, 15);
			captureDevice.UnlockForConfiguration ();

			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null) {
				Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
				return false;
			}

			session.AddInput (input);

			// create a VideoDataOutput and add it to the session
			var settings = new CVPixelBufferAttributes {
				PixelFormatType = CVPixelFormatType.CV32BGRA
			};
			using (var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary }) {
				queue = new DispatchQueue ("myQueue");
				outputRecorder = new OutputRecorder ();
				output.SetSampleBufferDelegate (outputRecorder, queue);
				session.AddOutput (output);
			}

			session.StartRunning ();
			return true;
		}
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession();

            AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null) {
                Console.WriteLine("No video camera (in simulator?)");
                return false; // simulator?
            }

            NSError error = null;

            AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error);

            if (input == null)
                Console.WriteLine("Error: " + error);
            else
                session.AddInput(input);

            AVCaptureMetadataOutput output = new AVCaptureMetadataOutput();

            var dg = new CaptureDelegate(this);
            output.SetDelegate(dg,	MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
            session.AddOutput(output);

            // This could be any list of supported barcode types
            output.MetadataObjectTypes = new NSString[] {AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode};
            // OR you could just accept "all" with the following line;
            //			output.MetadataObjectTypes = output.AvailableMetadataObjectTypes;  // empty
            // DEBUG: use this if you're curious about the available types
            //			foreach (var t in output.AvailableMetadataObjectTypes)
            //				Console.WriteLine(t);

            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session);
            //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height);
            previewLayer.Frame = new RectangleF(0, 0, 320, 290);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
            View.Layer.AddSublayer (previewLayer);

            session.StartRunning();

            Console.WriteLine("StartRunning");
            return true;
        }
Example #43
			bool SetupCaptureSession ()
			{
				// configure the capture session for low resolution, change this if your code
				// can cope with more data or volume
				session = new AVCaptureSession () {
					SessionPreset = AVCaptureSession.PresetMedium
				};


				AVCaptureDevice captureDevice = null;
				var videoDevices = AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video);
				foreach (AVCaptureDevice Device in videoDevices) {
					if (Device.Position == AVCaptureDevicePosition.Front)
					{
						captureDevice = Device;
						break;
					}
				}



				// create a device input and attach it to the session
				if(captureDevice==null){
					captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
				}
				if (captureDevice == null){
					return false;
				}
				//Configure for 15 FPS. Note use of LockForConfiguration()/UnlockForConfiguration()
				NSError error = null;
				captureDevice.LockForConfiguration(out error);

				if(error != null)
				{
					captureDevice.UnlockForConfiguration();
					return false;
				}
				if(UIDevice.CurrentDevice.CheckSystemVersion(7,0))
					captureDevice.ActiveVideoMinFrameDuration = new CMTime (1,15);

				captureDevice.UnlockForConfiguration();

				var input = AVCaptureDeviceInput.FromDevice (captureDevice);

				if (input == null){
					return false;
				}

				session.AddInput (input);

				// create a VideoDataOutput and add it to the session
				var output = new AVCaptureVideoDataOutput () {
					VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
				};


				// configure the output
				queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
				outputRecorder = new OutputRecorder (_state);
				output.SetSampleBufferDelegate (outputRecorder, queue);
				session.AddOutput (output);

				session.StartRunning ();
				return true;
			}
        private void StartCameraWithCompletionHandler(Action<bool, NSError> completion)
        {
            captureSession = new AVCaptureSession ();
            captureSession.BeginConfiguration ();
            captureDevice = CameraDeviceForPosition (AVCaptureDevicePosition.Back);

            if (captureDevice == null) {
                string message = "Error message back camera - not found";
                string title = "Error";
                ShowErrorMessage (message, title);
                return;
            }

            NSError error;
            AVCaptureDeviceInput deviceInput = AVCaptureDeviceInput.FromDevice (captureDevice, out error);
            if (deviceInput == null) {
                Console.WriteLine ("This error should be handled appropriately in your app -- obtain device input: {0}", error);

                string message = "Error message back camera - can't open.";
                string title = "Error";
                ShowErrorMessage (message, title);
                return;
            }

            captureSession.AddInput (deviceInput);
            stillImageOutput = new AVCaptureStillImageOutput ();

            //Or instead of JPEG, we can use one of the following pixel formats: BGRA, 420f output
            stillImageOutput.OutputSettings = new NSDictionary (AVVideo.CodecKey, AVVideo.CodecJPEG);
            captureSession.AddOutput (stillImageOutput);
            cameraPreviewView.ConfigureCaptureSession (captureSession, stillImageOutput);
            captureSession.SessionPreset = AVCaptureSession.PresetPhoto;

            captureDeviceFormat = captureDevice.ActiveFormat;
            captureSession.CommitConfiguration ();
            captureSession.StartRunning ();
            maxBracketCount = stillImageOutput.MaxBracketedCaptureStillImageCount;
            PrepareBracketsWithCompletionHandler (completion);
        }
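PrepareBracketsWithCompletionHandler is not included in this excerpt. For the simpler single-shot case, here is a hedged sketch of grabbing one JPEG frame from the same stillImageOutput; the method name is mine and it requires System.Threading.Tasks.

        // Illustrative single still capture against the stillImageOutput configured above.
        async Task<NSData> CaptureStillJpegAsync ()
        {
            var connection = stillImageOutput.ConnectionFromMediaType (AVMediaType.Video);
            using (var sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync (connection)) {
                // Convert the captured sample buffer into JPEG bytes for saving or display.
                return AVCaptureStillImageOutput.JpegStillToNSData (sampleBuffer);
            }
        }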
Example #45
        public bool StartRecording()
        {
            try
            {
                session = MaybeInitializeSession();
                if (session == null)
                {
                    Failure.Alert("Couldn't initialize session");
                    return false;
                }
                writer = MaybeInitializeAssetWriter();
                if (writer == null)
                {
                    Failure.Alert("Couldn't initialize writer");
                    return false;
                }
                inputWriter = MaybeInitializeInputWriter();
                if (inputWriter == null)
                {
                    Failure.Alert("Couldn't initialize input writer");
                    return false;
                }
                if (!writer.CanAddInput(inputWriter))
                {
                    Failure.Alert("Couldn't add input writer to writer");
                    return false;
                }
                writer.AddInput(inputWriter);

                session.StartRunning();
                return true;
            }
            catch (Exception x)
            {
                Failure.Alert(x.Message);
                return false;
            }
        }
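StartRecording wires the writer up, but nothing here tears it down. A minimal stop sketch follows, assuming session is an AVCaptureSession, writer an AVAssetWriter, and inputWriter an AVAssetWriterInput, as the calls above suggest.

        // Illustrative counterpart to StartRecording: stop capture and close out the writer.
        public void StopRecording()
        {
            session.StopRunning();
            inputWriter.MarkAsFinished();
            writer.FinishWriting(() => {
                Console.WriteLine("Asset writer finished with status: " + writer.Status);
            });
        }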
		private void SetupCamera()
		{
			CaptureSession = null;
			CaptureSession = new AVCaptureSession();
			CaptureSession.SessionPreset = AVCaptureSession.PresetPhoto;

			currentDevice = null;
			inputDevice1 = null;
			inputDevice2 = null;

			foreach (AVCaptureDevice device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
			{
				if (device.Position == AVCaptureDevicePosition.Front)
				{
					inputDevice1 = device;
				}
				else if (device.Position == AVCaptureDevicePosition.Back)
				{
					inputDevice2 = device;
				}
			}

			NSError error;
			if (inputDevice1 != null && inputDevice1.HasFlash)
			{
				inputDevice1.LockForConfiguration(out error);
				inputDevice1.FlashMode = AVCaptureFlashMode.Off;
				inputDevice1.UnlockForConfiguration();
				FlashButton.TitleLabel.Text = "Flash Off";
			}

			if (inputDevice2 != null && inputDevice2.HasFlash)
			{
				inputDevice2.LockForConfiguration(out error);
				inputDevice2.FlashMode = AVCaptureFlashMode.Off;
				inputDevice2.UnlockForConfiguration();
				FlashButton.TitleLabel.Text = "Flash Off";
			}


			frontCamera = AVCaptureDeviceInput.FromDevice(inputDevice1, out error);
			rearCamera = AVCaptureDeviceInput.FromDevice(inputDevice2, out error);
			currentDevice = inputDevice2;

			if (CaptureSession.CanAddInput(rearCamera))
			{
				CaptureSession.AddInput(rearCamera);
			}

			AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession);
			previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			previewLayer.Frame = View.Frame;
			View.Layer.InsertSublayer(previewLayer, 0);

			StillImageOutput = new AVCaptureStillImageOutput();
			StillImageOutput.OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG);

			CaptureSession.AddOutput(StillImageOutput);

			CaptureSession.StartRunning();
		}
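SetupCamera creates both frontCamera and rearCamera inputs but only attaches the rear one. The sketch below shows one way to switch between them at runtime, reusing the same fields; BeginConfiguration/CommitConfiguration lets the session apply the swap atomically.

		// Illustrative camera toggle built on the inputs created in SetupCamera.
		private void SwitchCamera()
		{
			CaptureSession.BeginConfiguration();

			if (currentDevice == inputDevice2)
			{
				CaptureSession.RemoveInput(rearCamera);
				if (CaptureSession.CanAddInput(frontCamera))
				{
					CaptureSession.AddInput(frontCamera);
					currentDevice = inputDevice1;
				}
			}
			else
			{
				CaptureSession.RemoveInput(frontCamera);
				if (CaptureSession.CanAddInput(rearCamera))
				{
					CaptureSession.AddInput(rearCamera);
					currentDevice = inputDevice2;
				}
			}

			CaptureSession.CommitConfiguration();
		}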
Example #47
		bool SetupCaptureSession ()
		{
			// configure the capture session for low resolution, change this if your code
			// can cope with more data or volume
			session = new AVCaptureSession () {
				SessionPreset = AVCaptureSession.Preset640x480
			};
			
			// create a device input and attach it to the session
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (captureDevice == null){
				Console.WriteLine ("No captureDevice - this won't work on the simulator, try a physical device");
				return false;
			}

			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			if (input == null){
				Console.WriteLine ("No input - this won't work on the simulator, try a physical device");
				return false;
			}
			else
				session.AddInput (input);


			previewLayer = new AVCaptureVideoPreviewLayer(session);

			//Framerate set here (10 fps)
			if (previewLayer.RespondsToSelector(new Selector("connection")))
				previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);

			previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			previewLayer.Frame = this.Frame;
			previewLayer.Position = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

			layerView = new UIView(this.Frame);
			layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
			layerView.Layer.AddSublayer(previewLayer);

			this.AddSubview(layerView);

			ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

			if (overlayView != null)
			{
				this.AddSubview (overlayView);
				this.BringSubviewToFront (overlayView);

				//overlayView.LayoutSubviews ();
			}

			session.StartRunning ();

			Console.WriteLine ("RUNNING!!!");

			// create a VideoDataOutput and add it to the session
			output = new AVCaptureVideoDataOutput () {
				//videoSettings
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
			};

			// configure the output
			queue = new MonoTouch.CoreFoundation.DispatchQueue("ZxingScannerView"); // (Guid.NewGuid().ToString());

			var barcodeReader = new BarcodeReader(null, (img) => 	
			{
				var src = new RGBLuminanceSource(img); //, bmp.Width, bmp.Height);

				//Don't try and rotate properly if we're autorotating anyway
				if (options.AutoRotate.HasValue && options.AutoRotate.Value)
					return src;

				switch (UIDevice.CurrentDevice.Orientation)
				{
					case UIDeviceOrientation.Portrait:
						return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise();
					case UIDeviceOrientation.PortraitUpsideDown:
						return src.rotateCounterClockwise().rotateCounterClockwise().rotateCounterClockwise();
					case UIDeviceOrientation.LandscapeLeft:
						return src;
					case UIDeviceOrientation.LandscapeRight:
						return src;
				}

				return src;

			}, null, null); //(p, w, h, f) => new RGBLuminanceSource(p, w, h, RGBLuminanceSource.BitmapFormat.Unknown));

			if (this.options.TryHarder.HasValue)
			{
				Console.WriteLine("TRY_HARDER: " + this.options.TryHarder.Value);
				barcodeReader.Options.TryHarder = this.options.TryHarder.Value;
			}
			if (this.options.PureBarcode.HasValue)
				barcodeReader.Options.PureBarcode = this.options.PureBarcode.Value;
			if (this.options.AutoRotate.HasValue)
			{
				Console.WriteLine("AUTO_ROTATE: " + this.options.AutoRotate.Value);
				barcodeReader.AutoRotate = this.options.AutoRotate.Value;
			}
			if (!string.IsNullOrEmpty (this.options.CharacterSet))
				barcodeReader.Options.CharacterSet = this.options.CharacterSet;
			if (this.options.TryInverted.HasValue)
				barcodeReader.TryInverted = this.options.TryInverted.Value;

			if (this.options.PossibleFormats != null && this.options.PossibleFormats.Count > 0)
			{
				barcodeReader.Options.PossibleFormats = new List<BarcodeFormat>();
				
				foreach (var pf in this.options.PossibleFormats)
					barcodeReader.Options.PossibleFormats.Add(pf);
			}

			outputRecorder = new OutputRecorder (this.options, img => 
			{
				try
				{
					var started = DateTime.Now;
					var rs = barcodeReader.Decode(img);
					var total = DateTime.Now - started;

					Console.WriteLine("Decode Time: " + total.TotalMilliseconds + " ms");

					if (rs != null)
						resultCallback(rs);
				}
				catch (Exception ex)
				{
					Console.WriteLine("DECODE FAILED: " + ex);
				}
			});

			output.AlwaysDiscardsLateVideoFrames = true;
			output.SetSampleBufferDelegate (outputRecorder, queue);


			Console.WriteLine("SetupCamera Finished");

			session.AddOutput (output);
			//session.StartRunning ();


			if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
			{
				NSError err = null;
				if (captureDevice.LockForConfiguration(out err))
				{
					captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;

					if (captureDevice.FocusPointOfInterestSupported)
						captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);

					captureDevice.UnlockForConfiguration();
				}
				else
					Console.WriteLine("Failed to Lock for Config: " + err.Description);
			}

			return true;
		}
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			this.View.BackgroundColor = UIColor.White;

			NSError error;

			// Setup detector options.
			var options = new CIDetectorOptions {
				Accuracy = FaceDetectorAccuracy.High,
				// Can give a hint here about the rects to detect. 1.4 would be for A4 sheets of paper for instance.
				AspectRatio = 1.41f,
				
			};

			// Create a rectangle detector. Note that you can also create a QR detector or a face detector.
			// Most of this code will also work with other detectors (like streaming to a preview layer and grabbing images).
			this.detector = CIDetector.CreateRectangleDetector (context: null, detectorOptions: options);

			// Create the session. The AVCaptureSession is the managing instance of the whole video handling.
			var captureSession = new AVCaptureSession ()
			{ 
				// Defines what quality we want to use for the images we grab. Photo gives highest resolutions.
				SessionPreset = AVCaptureSession.PresetPhoto
			};

			// Find a suitable AVCaptureDevice for video input.
			var device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
			if (device == null)
			{
				// This will not work on the iOS Simulator - there is no camera. :-)
				throw new InvalidProgramException ("Failed to get AVCaptureDevice for video input!");
			}

			// Create a device input with the device and add it to the session.
			var videoInput = AVCaptureDeviceInput.FromDevice (device, out error);
			if (videoInput == null)
			{
				throw new InvalidProgramException ("Failed to get AVCaptureDeviceInput from AVCaptureDevice!");
			}

			// Let session read from the input, this is our source.
			captureSession.AddInput (videoInput);

			// Create output for the video stream. This is the destination.
			var videoOutput = new AVCaptureVideoDataOutput () {
				AlwaysDiscardsLateVideoFrames = true
			};

			// Define the video format we want to use. Note that Xamarin exposes the CompressedVideoSetting and UncompressedVideoSetting
			// properties on AVCaptureVideoDataOutput in the Unified API, but I could not get these to work. The VideoSettings property is deprecated,
			// so I use WeakVideoSettings instead, which takes an NSDictionary as input.
			this.videoSettingsDict = new NSMutableDictionary ();
			this.videoSettingsDict.Add (CVPixelBuffer.PixelFormatTypeKey, NSNumber.FromUInt32((uint)CVPixelFormatType.CV32BGRA));
			videoOutput.WeakVideoSettings = this.videoSettingsDict;

			// Create a delegate to report back to us when an image has been captured.
			// We want to grab the camera stream and feed it through a AVCaptureVideoDataOutputSampleBufferDelegate
			// which allows us to get notified when a new image is available. An implementation of that delegate is VideoFrameSamplerDelegate in this project.
			this.sampleBufferDelegate = new VideoFrameSamplerDelegate ();

			// Processing happens via Grand Central Dispatch (GCD), so we need to provide a queue.
			// This is pretty much like a system managed thread (see: http://zeroheroblog.com/ios/concurrency-in-ios-grand-central-dispatch-gcd-dispatch-queues).
			this.sessionQueue =  new DispatchQueue ("AVSessionQueue");

			// Assign the queue and the delegate to the output. Now all output will go through the delegate.
			videoOutput.SetSampleBufferDelegate(this.sampleBufferDelegate, this.sessionQueue);

			// Add output to session.
			captureSession.AddOutput(videoOutput);

			// We also want to visualize the input stream. The raw stream can be fed into an AVCaptureVideoPreviewLayer, which is a subclass of CALayer.
			// A CALayer can be added to a UIView. We add that layer to the controller's main view.
			var layer = this.View.Layer;
			this.videoLayer = AVCaptureVideoPreviewLayer.FromSession (captureSession);
			this.videoLayer.Frame = layer.Bounds;
			layer.AddSublayer (this.videoLayer);

			// All setup! Start capturing!
			captureSession.StartRunning ();

			// This is just for information and allows you to get valid values for the detection framerate. 
			Console.WriteLine ("Available capture framerates:");
			var rateRanges = device.ActiveFormat.VideoSupportedFrameRateRanges;
			foreach (var r in rateRanges)
			{
				Console.WriteLine (r.MinFrameRate + "; " + r.MaxFrameRate + "; " + r.MinFrameDuration + "; " + r.MaxFrameDuration);
			}

			// Configure framerate. Kind of weird way of doing it but the only one that works.
			device.LockForConfiguration (out error);
			// CMTime constructor means: 1 = one second, DETECTION_FPS = how many samples per unit, which is 1 second in this case.
			device.ActiveVideoMinFrameDuration = new CMTime(1, DETECTION_FPS);
			device.ActiveVideoMaxFrameDuration = new CMTime(1, DETECTION_FPS);
			device.UnlockForConfiguration ();

			// Put a small image view at the top left that shows the live image with the detected rectangle(s).
			this.imageViewOverlay = new UIImageView
			{ 
				ContentMode = UIViewContentMode.ScaleAspectFit,
				BackgroundColor = UIColor.Gray
			};
			this.imageViewOverlay.Layer.BorderColor = UIColor.Red.CGColor;
			this.imageViewOverlay.Layer.BorderWidth = 3f;
			this.Add (this.imageViewOverlay);

			// Put another image view top right that shows the image with perspective correction.
			this.imageViewPerspective = new UIImageView
			{ 
				ContentMode = UIViewContentMode.ScaleAspectFit,
				BackgroundColor = UIColor.Gray
			};
			this.imageViewPerspective.Layer.BorderColor = UIColor.Red.CGColor;
			this.imageViewPerspective.Layer.BorderWidth = 3f;
			this.Add (this.imageViewPerspective);

			// Add some labels for information.
			this.mainWindowLbl = new UILabel
			{
				Text = "Live stream from camera. Point camera to a rectangular object.",
				TextAlignment = UITextAlignment.Center
			};
			this.Add (this.mainWindowLbl);

			this.detectionWindowLbl = new UILabel
			{
				Text = "Detected rectangle overlay",
				TextAlignment = UITextAlignment.Center
			};
			this.Add (this.detectionWindowLbl);

			this.perspectiveWindowLbl = new UILabel
			{
				Text = "Perspective corrected",
				TextAlignment = UITextAlignment.Center
			};
			this.Add (this.perspectiveWindowLbl);
		}
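The VideoFrameSamplerDelegate used above is not part of this excerpt. Below is a rough sketch of what it could look like, assuming it simply feeds each frame to the rectangle detector created in ViewDidLoad; the class name and property wiring are illustrative.

		// Illustrative frame sampler: runs each camera frame through a CIDetector.
		// The real VideoFrameSamplerDelegate in the project is not shown here.
		class VideoFrameSamplerSketch : AVCaptureVideoDataOutputSampleBufferDelegate
		{
			public CIDetector Detector { get; set; }

			public override void DidOutputSampleBuffer (AVCaptureOutput captureOutput,
				CMSampleBuffer sampleBuffer, AVCaptureConnection connection)
			{
				try
				{
					using (var pixelBuffer = sampleBuffer.GetImageBuffer () as CVPixelBuffer)
					using (var image = new CIImage (pixelBuffer))
					{
						var features = Detector.FeaturesInImage (image);
						if (features.Length > 0)
							Console.WriteLine ("Detected {0} rectangle(s)", features.Length);
					}
				}
				finally
				{
					// Release the buffer so AVFoundation keeps delivering frames.
					sampleBuffer.Dispose ();
				}
			}
		}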
Example #49
        public override void ViewDidLoad()
        {
            base.ViewDidLoad ();
            weAreRecording = false;
            lblError.Hidden = true;

            btnStartRecording.SetTitle("Start Recording", UIControlState.Normal);

            //Set up session
            session = new AVCaptureSession ();

            //Set up inputs and add them to the session
            //this will only work if using a physical device!

            Console.WriteLine ("getting device inputs");
            try{
                //add video capture device
                device = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
                input = AVCaptureDeviceInput.FromDevice (device);
                session.AddInput (input);

                //add audio capture device
                audioDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Audio);
                audioInput = AVCaptureDeviceInput.FromDevice(audioDevice);
                session.AddInput(audioInput);

            }
            catch(Exception ex){
                //show the error label. This will always be hit when running in the simulator instead of on a physical device.
                lblError.Hidden = false;
                return;
            }

            //Set up preview layer (shows what the input device sees)
            Console.WriteLine ("setting up preview layer");
            previewlayer = new AVCaptureVideoPreviewLayer (session);
            previewlayer.Frame = this.View.Bounds;

            //this code makes UI controls sit on top of the preview layer!  Allows you to just place the controls in interface builder
            UIView cameraView = new UIView ();
            cameraView.Layer.AddSublayer (previewlayer);
            this.View.AddSubview (cameraView);
            this.View.SendSubviewToBack (cameraView);

            Console.WriteLine ("Configuring output");
            output = new AVCaptureMovieFileOutput ();

            long totalSeconds = 10000;
            Int32 preferredTimeScale = 30;
            CMTime maxDuration = new CMTime (totalSeconds, preferredTimeScale);
            output.MinFreeDiskSpaceLimit = 1024 * 1024;
            output.MaxRecordedDuration = maxDuration;

            if (session.CanAddOutput (output)) {
                session.AddOutput (output);
            }

            session.SessionPreset = AVCaptureSession.PresetMedium;

            Console.WriteLine ("About to start running session");

            session.StartRunning ();

            //toggle recording button was pushed.
            btnStartRecording.TouchUpInside += startStopPushed;

            //Console.ReadLine ();
        }
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession () {
                SessionPreset = AVCaptureSession.PresetMedium
            };

            AVCaptureDevice[] capDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            AVCaptureDeviceInput input = null;
            if (capDevices.Length != 0) input = AVCaptureDeviceInput.FromDevice (capDevices[0]);
            if (input == null){
                new UIAlertView("Error", "Camera not available", null, "OK", null).Show();
                Console.WriteLine ("Camera not available");
                return false;
            }
            session.AddInput (input);

            var output = new AVCaptureVideoDataOutput () {
                VideoSettings = new AVVideoSettings (CVPixelFormatType.CV32BGRA),
                MinFrameDuration = new CMTime (1, 30)  //second parameter is frames per second
            };

            queue = new MonoTouch.CoreFoundation.DispatchQueue ("myQueue");
            outputRecorder = new OutputRecorder ();
            output.SetSampleBufferDelegateAndQueue (outputRecorder, queue);
            session.AddOutput (output);

            session.StartRunning ();
            return true;
        }
Example #51
bool SetupCaptureSession ()
      {
         // configure the capture session for low resolution, change this if your code
         // can cope with more data or volume
         session = new AVCaptureSession () {
            SessionPreset = AVCaptureSession.PresetMedium
         };

         // create a device input and attach it to the session
         var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
         if (captureDevice == null) {
            Image<Bgr, Byte> img = new Image<Bgr, byte> (512, 512, new Bgr (255, 255, 255));
            CvInvoke.PutText (
               img, 
               "Capture device not found.", 
               new Point (10, 200), 
               FontFace.HersheyComplex, 
               1, 
               new MCvScalar (), 
               2);
            ImageView.Image = img.ToUIImage();
            return false;
         }
         var input = AVCaptureDeviceInput.FromDevice (captureDevice);
         if (input == null){
            Console.WriteLine ("No input device");
            return false;
         }
         session.AddInput (input);

         // create a VideoDataOutput and add it to the session
         AVVideoSettingsUncompressed settingUncomp = new AVVideoSettingsUncompressed();
         settingUncomp.PixelFormatType = CVPixelFormatType.CV32BGRA;
         var output = new AVCaptureVideoDataOutput () {
            UncompressedVideoSetting = settingUncomp,

            // If you want to cap the frame rate at a given speed, in this sample: 15 frames per second
            //MinFrameDuration = new CMTime (1, 15)
         };
        

         // configure the output
         queue = new DispatchQueue ("myQueue");
         outputRecorder = new OutputRecorder (ImageView);
         output.SetSampleBufferDelegateQueue(outputRecorder, queue);
         session.AddOutput (output);

         session.StartRunning ();
         return true;
      }
		void SetupAVCapture (NSString sessionPreset)
		{
			if ((videoTextureCache = CVOpenGLESTextureCache.FromEAGLContext (context)) == null){
				Console.WriteLine ("Could not create the CoreVideo TextureCache");
				return;
			}
			session = new AVCaptureSession ();
			session.BeginConfiguration ();
			
			// Preset size
			session.SessionPreset = sessionPreset;
			
			// Input device
			var videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			if (videoDevice == null){
				Console.WriteLine ("No video device");
				return;
			}
			NSError err;
			var input = new AVCaptureDeviceInput (videoDevice, out err);
			if (err != null){
				Console.WriteLine ("Error creating video capture device");
				return;
			}
			session.AddInput (input);
			
			// Create the output device
			var dataOutput = new AVCaptureVideoDataOutput () {
				AlwaysDiscardsLateVideoFrames = true,
				
				// YUV 420, use "BiPlanar" to split the Y and UV planes in two separate blocks of 
				// memory, then we can index 0 to get the Y and 1 for the UV planes in the frame decoding
				VideoSettings = new AVVideoSettings (CVPixelFormatType.CV420YpCbCr8BiPlanarFullRange)
			};
					
			dataOutputDelegate = new DataOutputDelegate (this);

			// 
			// This dispatches the video frames into the main thread, because the OpenGL
			// code is accessing the data synchronously.
			//
			dataOutput.SetSampleBufferDelegateAndQueue (dataOutputDelegate, DispatchQueue.MainQueue);
			session.AddOutput (dataOutput);
			session.CommitConfiguration ();
			session.StartRunning ();
		}
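SetupAVCapture has no matching teardown in this excerpt. A minimal sketch follows, assuming the session and videoTextureCache fields created above are the only things that need releasing here.

		// Illustrative teardown matching SetupAVCapture above.
		void TeardownAVCapture ()
		{
			if (session != null) {
				session.StopRunning ();
				session.Dispose ();
				session = null;
			}

			if (videoTextureCache != null) {
				videoTextureCache.Dispose ();
				videoTextureCache = null;
			}
		}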