/******************************* MAIN FUNCTIONS *******************************/
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();
			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice (captureDevice);
			CaptureSession = new AVCaptureSession();
			CaptureSession.AddInput (input);

			var captureMetadataOutput = new AVCaptureMetadataOutput();
			metadataDelegate = new MetadataObjectsDelegate();
			metadataDelegate.outer = this;
			captureMetadataOutput.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
			CaptureSession.AddOutput(captureMetadataOutput);
			captureMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

			VideoPreviewLayer = new AVCaptureVideoPreviewLayer (CaptureSession);
			VideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
			VideoPreviewLayer.Frame = View.Layer.Bounds;
			View.Layer.AddSublayer (VideoPreviewLayer);

			View.BringSubviewToFront (messageLabel);

			QRCodeFrameView = new UIView ();
			QRCodeFrameView.Layer.BorderColor = UIColor.Green.CGColor;
			QRCodeFrameView.Layer.BorderWidth = 2;
			View.AddSubview (QRCodeFrameView);
			View.BringSubviewToFront (QRCodeFrameView);

			CaptureSession.StartRunning();

			cancelButton.Clicked += (sender, e) => {
				this.DismissViewController (true, null);
			};
		}
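
The MetadataObjectsDelegate used above is not part of this snippet. A minimal sketch of what it could look like, assuming the hosting controller is named QRScannerViewController (a hypothetical name) and exposes the QRCodeFrameView, VideoPreviewLayer and messageLabel members referenced in ViewDidLoad:

using AVFoundation;
using CoreGraphics;

public class MetadataObjectsDelegate : AVCaptureMetadataOutputObjectsDelegate
{
    // "outer" points back at the controller shown above; QRScannerViewController is an assumed name.
    public QRScannerViewController outer;

    public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput,
        AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
    {
        if (metadataObjects == null || metadataObjects.Length == 0) {
            outer.QRCodeFrameView.Frame = CGRect.Empty;
            return;
        }

        var readableObject = metadataObjects [0] as AVMetadataMachineReadableCodeObject;
        if (readableObject == null || readableObject.Type != AVMetadataObjectType.QRCode)
            return;

        // Map the detection into the preview layer's coordinate space, highlight it and show the payload.
        // This runs on the main queue because SetDelegate was given DispatchQueue.MainQueue above.
        var transformed = outer.VideoPreviewLayer.GetTransformedMetadataObject (readableObject);
        outer.QRCodeFrameView.Frame = transformed.Bounds;
        outer.messageLabel.Text = readableObject.StringValue;
    }
}
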
        private bool initScanner()
        {
            device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            if (device == null)
            {
                this.Debug("AVCaptureDevice is null");

                return(false);
            }

            input = AVCaptureDeviceInput.FromDevice(device);

            if (input == null)
            {
                this.Debug("AVCaptureDeviceInput is null");

                return(false);
            }

            output = new AVCaptureMetadataOutput();
            output.SetDelegate(this, DispatchQueue.MainQueue);

            session = new AVCaptureSession();
            session.AddInput(input);
            session.AddOutput(output);
            output.MetadataObjectTypes = configuration.Barcodes.ConvertToIOS();

            captureVideoPreviewLayer              = AVCaptureVideoPreviewLayer.FromSession(session);
            captureVideoPreviewLayer.Frame        = CGRect.Empty;
            captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            captureVideoPreviewLayer.Connection.VideoOrientation = getDeviceOrientation();

            return(true);
        }
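
The getDeviceOrientation() helper referenced above is not included in the snippet. A minimal sketch of the mapping it presumably performs, from the current interface orientation to the preview connection's orientation (requires using UIKit and AVFoundation):

        private AVCaptureVideoOrientation getDeviceOrientation()
        {
            switch (UIApplication.SharedApplication.StatusBarOrientation)
            {
                case UIInterfaceOrientation.LandscapeLeft:
                    return AVCaptureVideoOrientation.LandscapeLeft;
                case UIInterfaceOrientation.LandscapeRight:
                    return AVCaptureVideoOrientation.LandscapeRight;
                case UIInterfaceOrientation.PortraitUpsideDown:
                    return AVCaptureVideoOrientation.PortraitUpsideDown;
                default:
                    return AVCaptureVideoOrientation.Portrait;
            }
        }
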
Example #3
        public void Start()
        {
            captureSession = new AVCaptureSession();
            previewLayer   = new AVCaptureVideoPreviewLayer(captureSession)
            {
                VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
            };

            try
            {
                var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);
                var input         = AVCaptureDeviceInput.FromDevice(captureDevice);
                var output        = new AVCaptureMetadataOutput();
                var queue         = new DispatchQueue("qrQueue");

                captureSession.AddInput(input);
                captureSession.AddOutput(output);

                output.SetDelegate(this, queue);
                output.MetadataObjectTypes = AVMetadataObjectType.QRCode;

                Layer.AddSublayer(previewLayer);

                captureSession.StartRunning();
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
            }
        }
Example #4
        void setupAVFoundationFaceDetection()
        {
            faceViews = new Dictionary <int, FaceView>();

            metadataOutput = new AVCaptureMetadataOutput();
            if (!captureSession.CanAddOutput(metadataOutput))
            {
                metadataOutput = null;
                return;
            }

            var metaDataObjectDelegate = new MetaDataObjectDelegate();

            metaDataObjectDelegate.DidOutputMetadataObjectsAction = DidOutputMetadataObjects;

            metadataOutput.SetDelegate(metaDataObjectDelegate, DispatchQueue.MainQueue);
            captureSession.AddOutput(metadataOutput);



            if (!metadataOutput.AvailableMetadataObjectTypes.HasFlag(AVMetadataObjectType.Face))
            {
                //teardownAVFoundationFaceDetection();
                return;
            }

            metadataOutput.MetadataObjectTypes = AVMetadataObjectType.Face;

            //DispatchQueue.MainQueue.DispatchAsync();
            //sessionQueue.DispatchAsync(updateAVFoundationFaceDetection);
            //updateAVFoundationFaceDetection();
        }
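
The MetaDataObjectDelegate wrapper used here is not shown. A minimal sketch of the Action-forwarding pattern it appears to follow; the exact signature of the forwarded Action is an assumption and should match the containing class's DidOutputMetadataObjects handler:

using System;
using AVFoundation;

class MetaDataObjectDelegate : AVCaptureMetadataOutputObjectsDelegate
{
    // Invoked on the queue passed to SetDelegate (the main queue in the example above).
    public Action<AVCaptureMetadataOutput, AVMetadataObject[], AVCaptureConnection> DidOutputMetadataObjectsAction;

    public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput,
        AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
    {
        DidOutputMetadataObjectsAction?.Invoke (captureOutput, metadataObjects, connection);
    }
}
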
        private void InitSession()
        {
            try
            {
                //init capture session
                _AVSession = new AVCaptureSession();

                //check permissions
                var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
                if (authorizationStatus != AVAuthorizationStatus.Authorized)
                {
                    return;
                }

                //check capture camera
                var cameras = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
                var camera  = cameras.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
                if (camera == null)
                {
                    return;
                }

                //add input to capture session
                _AVDeviceImput = new AVCaptureDeviceInput(camera, out NSError _);
                if (_AVSession.CanAddInput(_AVDeviceImput))
                {
                    _AVSession.AddInput(_AVDeviceImput);
                }
                else
                {
                    return;
                }

                //add output to camera session
                _MetadataObjectsQueue = new DispatchQueue("metadata objects queue");
                _AVMetadataOutput     = new AVCaptureMetadataOutput();
                if (_AVSession.CanAddOutput(_AVMetadataOutput))
                {
                    _AVSession.AddOutput(_AVMetadataOutput);
                }
                else
                {
                    return;
                }
                _AVMetadataOutput.SetDelegate(this, _MetadataObjectsQueue);

                //init the video preview layer and add it to the current view
                _AVVideoPeviewLayer = new AVCaptureVideoPreviewLayer(_AVSession);
                _AVVideoPeviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                _AVVideoPeviewLayer.Frame        = Bounds;
                this.Layer.AddSublayer(_AVVideoPeviewLayer);

                //start capture session
                StartSession(true);
            }
            catch (Exception ex)
            {
                Console.WriteLine("IOS_SCAN | init error", ex);
            }
        }
Example #6
        private bool InitScanner(BarcodeScanner.BarcodeFormat barcodeType)
        {
            device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);
            if (device == null)
            {
                return(false);
            }

            input = AVCaptureDeviceInput.FromDevice(device);
            if (input == null)
            {
                return(false);
            }

            if (input.Device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                input.Device.LockForConfiguration(out NSError err);
                input.Device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                input.Device.UnlockForConfiguration();
            }

            output = new AVCaptureMetadataOutput();
            output.SetDelegate(this, DispatchQueue.MainQueue);

            session = new AVCaptureSession();
            session.AddInput(input);
            session.AddOutput(output);
            output.MetadataObjectTypes = GetBarcodeFormat(barcodeType);

            captureVideoPreviewLayer              = AVCaptureVideoPreviewLayer.FromSession(session);
            captureVideoPreviewLayer.Frame        = CGRect.Empty;
            captureVideoPreviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            captureVideoPreviewLayer.Connection.VideoOrientation = GetDeviceOrientation();
            return(true);
        }
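
GetBarcodeFormat is defined elsewhere in that project; a sketch of the kind of mapping it likely contains, translating the plugin's cross-platform enum (member names assumed) into AVMetadataObjectType flags. Note that MetadataObjectTypes must only be assigned after the output has been added to the session, as the example does:

        private AVMetadataObjectType GetBarcodeFormat(BarcodeScanner.BarcodeFormat barcodeType)
        {
            switch (barcodeType)
            {
                case BarcodeScanner.BarcodeFormat.QrCode:   // assumed enum member
                    return AVMetadataObjectType.QRCode;
                case BarcodeScanner.BarcodeFormat.Ean13:    // assumed enum member
                    return AVMetadataObjectType.EAN13Code;
                case BarcodeScanner.BarcodeFormat.Code128:  // assumed enum member
                    return AVMetadataObjectType.Code128Code;
                default:
                    // Fall back to everything the session can currently deliver.
                    return output.AvailableMetadataObjectTypes;
            }
        }
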
        public void Defaults()
        {
            using (var obj = new AVCaptureMetadataOutput()) {
#if XAMCORE_2_0
                Assert.AreEqual(AVMetadataObjectType.None, obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");

                Assert.IsNotNull(obj.WeakAvailableMetadataObjectTypes, "WeakAvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakAvailableMetadataObjectTypes.Length, "WeakAvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.WeakMetadataObjectTypes, "WeakMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakMetadataObjectTypes.Length, "WeakMetadataObjectTypes#");
#else
                Assert.IsNotNull(obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.AvailableMetadataObjectTypes.Length, "AvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.MetadataObjectTypes, "MetadataObjectTypes");
                Assert.AreEqual(0, obj.MetadataObjectTypes.Length, "MetadataObjectTypes#");
#endif
                if (TestRuntime.CheckSystemVersion(PlatformName.iOS, 7, 0, throwIfOtherPlatform: false))
                {
                    Assert.AreEqual(new RectangleF(0, 0, 1, 1), obj.RectOfInterest, "RectOfInterest");
                }

#if XAMCORE_2_0
                obj.WeakMetadataObjectTypes = null;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
                obj.MetadataObjectTypes = AVMetadataObjectType.None;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
#else
                obj.MetadataObjectTypes = null;
                Assert.IsNotNull(obj.MetadataObjectTypes, "MetadataObjectTypes");
                Assert.AreEqual(0, obj.MetadataObjectTypes.Length, "MetadataObjectTypes#");
#endif
                obj.SetDelegate(null, null);
            }
        }
        void ConfigureSession()
        {
            if (setupResult != SessionSetupResult.Success)
            {
                return;
            }

            session.BeginConfiguration();

            var     videoDevice = DeviceWithMediaType(AVMediaType.Video, AVCaptureDevicePosition.Back);
            NSError err;
            var     vDeviceInput = AVCaptureDeviceInput.FromDevice(videoDevice, out err);

            if (err != null)
            {
                Console.WriteLine($"Could not create video device input: ${err}");
                setupResult = SessionSetupResult.ConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            if (session.CanAddInput(vDeviceInput))
            {
                session.AddInput(vDeviceInput);
                videoDeviceInput = vDeviceInput;
            }
            else
            {
                Console.WriteLine("Could not add video device input to the session");
                setupResult = SessionSetupResult.ConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            // Add metadata output.
            if (session.CanAddOutput(metadataOutput))
            {
                session.AddOutput(metadataOutput);

                // Set this view controller as the delegate for metadata objects.
                metadataOutput.SetDelegate(this, metadataObjectsQueue);
                metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;                 // Use all metadata object types by default.
                metadataOutput.RectOfInterest      = CGRect.Empty;
            }
            else
            {
                Console.WriteLine("Could not add metadata output to the session");
                setupResult = SessionSetupResult.ConfigurationFailed;
                session.CommitConfiguration();
                return;
            }

            session.CommitConfiguration();
        }
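
DeviceWithMediaType is a small helper outside this excerpt; a plausible sketch that returns the first camera at the requested position, falling back to any camera of that media type (requires using System.Linq):

        static AVCaptureDevice DeviceWithMediaType(NSString mediaType, AVCaptureDevicePosition position)
        {
            var devices = AVCaptureDevice.DevicesWithMediaType(mediaType);
            return devices.FirstOrDefault(d => d.Position == position) ?? devices.FirstOrDefault();
        }
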
        bool SetupCaptureSession()
        {
            session = new AVCaptureSession();

            AVCaptureDevice device = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            if (device == null)
            {
                Console.WriteLine("No video camera (in simulator?)");
                return(false);                // simulator?
            }

            NSError error = null;

            AVCaptureDeviceInput input = AVCaptureDeviceInput.FromDevice(device, out error);

            if (input == null)
            {
                Console.WriteLine("Error: " + error);
            }
            else
            {
                session.AddInput(input);
            }

            AVCaptureMetadataOutput output = new AVCaptureMetadataOutput();

            var dg = new CaptureDelegate(this);

            output.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
            session.AddOutput(output);

            // This could be any list of supported barcode types
            output.MetadataObjectTypes = new NSString[] { AVMetadataObject.TypeQRCode, AVMetadataObject.TypeAztecCode };
            // OR you could just accept "all" with the following line;
//			output.MetadataObjectTypes = output.AvailableMetadataObjectTypes;  // empty
            // DEBUG: use this if you're curious about the available types
//			foreach (var t in output.AvailableMetadataObjectTypes)
//				Console.WriteLine(t);


            AVCaptureVideoPreviewLayer previewLayer = new AVCaptureVideoPreviewLayer(session);

            //previewLayer.Frame = new RectangleF(0,0, View.Frame.Size.Width, View.Frame.Size.Height);
            previewLayer.Frame        = new RectangleF(0, 0, 320, 290);
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill.ToString();
            View.Layer.AddSublayer(previewLayer);

            session.StartRunning();

            Console.WriteLine("StartRunning");
            return(true);
        }
Example #10
            private CaptureSession(SelfView parent, AVCaptureDeviceInput input, AVCaptureMetadataOutput output)
            {
                this.parent  = parent;
                this.queue   = new DispatchQueue("myQueue");
                this.session = new AVCaptureSession {
                    SessionPreset = AVCaptureSession.PresetMedium
                };

                session.AddInput(input);

                output.SetDelegate(this, queue);
                session.AddOutput(output);
            }
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            var device = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

            if (device is null)
            {
                this.ShowAlert("无法访问相机", null);
                return;
            }

            var input = AVCaptureDeviceInput.FromDevice(device);

            if (input is null)
            {
                this.ShowAlert("无法访问相机", null);
                return;
            }

            session.AddInput(input);
            try
            {
                var output = new AVCaptureMetadataOutput();
                output.SetDelegate(this, DispatchQueue.MainQueue);
                session.AddOutput(output);

                output.MetadataObjectTypes = AVMetadataObjectType.QRCode;
            }
            catch
            {
                return;
            }

            preview = AVCaptureVideoPreviewLayer.FromSession(session);
            if (preview is null)
            {
                this.ShowAlert("无法显示扫描预览", null);
                return;
            }
            preview.VideoGravity = AVLayerVideoGravity.Resize;
            preview.Frame        = View.Layer.Bounds;
            View.Layer.AddSublayer(preview);

            session.StartRunning();

            codeFrame = new UIView();
            codeFrame.Layer.BorderColor = UIColor.Green.CGColor;
            codeFrame.Layer.BorderWidth = 2;
            View.AddSubview(codeFrame);
            View.BringSubviewToFront(codeFrame);
        }
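
Because this controller passes itself to SetDelegate, it must also implement IAVCaptureMetadataOutputObjectsDelegate; the callback itself is not shown above. A minimal sketch, assuming the controller implements that interface and simply highlights the code and surfaces its value through the same ShowAlert helper used earlier:

        [Export("captureOutput:didOutputMetadataObjects:fromConnection:")]
        public void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput,
            AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
        {
            if (metadataObjects.Length == 0)
            {
                codeFrame.Frame = CGRect.Empty;
                return;
            }

            var code = metadataObjects[0] as AVMetadataMachineReadableCodeObject;
            if (code == null || code.Type != AVMetadataObjectType.QRCode)
            {
                return;
            }

            // Highlight the code in preview-layer coordinates and report its payload.
            // Runs on the main queue because SetDelegate was given DispatchQueue.MainQueue.
            var transformed = preview.GetTransformedMetadataObject(code);
            codeFrame.Frame = transformed.Bounds;

            session.StopRunning();
            this.ShowAlert(code.StringValue, null);
        }
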
Example #12
        public void SetupCapture()
        {
            this.session = new AVCaptureSession();
            device       = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
            //To change this setting the device must be locked first and unlocked after the change, otherwise the app crashes
//			device.LockForConfiguration(out error);
//			//Set the flash to auto
//			device.FlashMode = AVCaptureFlashMode.Auto;
//			device.UnlockForConfiguration();

            this.videoInput = new AVCaptureDeviceInput(device, out error);
            if (null != error)
            {
                Console.WriteLine("error=" + error);
            }

            this.metadataOutput = new AVCaptureMetadataOutput();

            if (this.session.CanAddInput(this.videoInput))
            {
                this.session.AddInput(this.videoInput);
            }
            if (this.session.CanAddOutput(this.metadataOutput))
            {
                this.session.AddOutput(this.metadataOutput);
            }

            // Create the dispatch queue.
            DispatchQueue dispatchQueue = new DispatchQueue("kScanQRCodeQueueName");

            metadataOutput.SetDelegate(QRCodeGlobalObject.TheAppDel, dispatchQueue);

            // Set the metadata type to QR code (AVMetadataObjectTypeQRCode)
            metadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

            //Initialize the preview layer
            this.previewLayer = new AVCaptureVideoPreviewLayer(this.session);
            this.previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspect;
            this.previewLayer.Frame        = new CGRect(0, 0, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Height);
            this.Layer.MasksToBounds       = true;
            this.Layer.InsertSublayer(previewLayer, 0);
        }
Example #13
        void setupCaptureSession()
        {
            if (CaptureSession != null)
            {
                return;
            }

            CaptureSession = new AVCaptureSession();

            NSNotificationCenter.DefaultCenter.AddObserver(null, captureSessionNotification, CaptureSession);

            applicationWillEnterForegroundNotificationObserver =
                NSNotificationCenter.DefaultCenter.AddObserver(UIApplication.WillEnterForegroundNotification.ToString(),
                                                               UIApplication.SharedApplication,
                                                               NSOperationQueue.CurrentQueue, delegate(NSNotification notification) {
                applicationWillEnterForeground();
            });

            videoDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);

            NSError error;

            videoInput = new AVCaptureDeviceInput(videoDevice, out error);
            if (CaptureSession.CanAddInput(videoInput))
            {
                CaptureSession.AddInput(videoInput);
            }

            metadataOutput = new AVCaptureMetadataOutput();

            var metadataQueue = new DispatchQueue("com.AVCam.metadata");

            metadataObjectsDelegate = new MetadataObjectsDelegate {
                DidOutputMetadataObjectsAction = DidOutputMetadataObjects
            };
            metadataOutput.SetDelegate(metadataObjectsDelegate, metadataQueue);

            if (CaptureSession.CanAddOutput(metadataOutput))
            {
                CaptureSession.AddOutput(metadataOutput);
            }
        }
Example #14
        /// <summary>
        /// Setups the face detection.
        /// </summary>
        protected void SetupFaceDetection()
        {
            FaceDetectionOutput = new AVCaptureMetadataOutput();
            if (Session.CanAddOutput(FaceDetectionOutput))
            {
                Session.AddOutput(FaceDetectionOutput);

                if (FaceDetectionOutput.AvailableMetadataObjectTypes.HasFlag(
                        AVMetadataObjectType.Face))
                {
                    FaceDetectionOutput.MetadataObjectTypes = AVMetadataObjectType.Face;
                    FaceDetectionOutput.SetDelegate(this, DispatchQueue.MainQueue);
                }
                else
                {
                    Session.RemoveOutput(FaceDetectionOutput);
                    FaceDetectionOutput.Dispose();
                    FaceDetectionOutput = null;
                }
            }
        }
Example #15
        public void Defaults()
        {
            if (!TestRuntime.CheckSystemAndSDKVersion(6, 0))
            {
                Assert.Ignore("requires iOS6+");
            }

            using (var obj = new AVCaptureMetadataOutput()) {
#if XAMCORE_2_0
                Assert.AreEqual(AVMetadataObjectType.None, obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");

                Assert.IsNotNull(obj.WeakAvailableMetadataObjectTypes, "WeakAvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakAvailableMetadataObjectTypes.Length, "WeakAvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.WeakMetadataObjectTypes, "WeakMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakMetadataObjectTypes.Length, "WeakMetadataObjectTypes#");
#else
                Assert.IsNotNull(obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.AvailableMetadataObjectTypes.Length, "AvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.MetadataObjectTypes, "MetadataObjectTypes");
                Assert.AreEqual(0, obj.MetadataObjectTypes.Length, "MetadataObjectTypes#");
#endif
                if (TestRuntime.CheckSystemAndSDKVersion(7, 0))
                {
                    Assert.AreEqual(new RectangleF(0, 0, 1, 1), obj.RectOfInterest, "RectOfInterest");
                }

#if XAMCORE_2_0
                obj.WeakMetadataObjectTypes = null;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
                obj.MetadataObjectTypes = AVMetadataObjectType.None;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
#else
                obj.MetadataObjectTypes = null;
                Assert.IsNotNull(obj.MetadataObjectTypes, "MetadataObjectTypes");
                Assert.AreEqual(0, obj.MetadataObjectTypes.Length, "MetadataObjectTypes#");
#endif
                obj.SetDelegate(null, null);
            }
        }
		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			session = new AVCaptureSession ();

			var camera = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
			var input = AVCaptureDeviceInput.FromDevice(camera);
			session.AddInput(input);

			output = new AVCaptureMetadataOutput();
			var metadataDelegate = new MetadataOutputDelegate();
			output.SetDelegate(metadataDelegate, DispatchQueue.MainQueue);
			session.AddOutput(output);

			output.MetadataObjectTypes = new NSString[] {
				AVMetadataObject.TypeQRCode,
				AVMetadataObject.TypeEAN13Code
			};

			var previewLayer = new AVCaptureVideoPreviewLayer(session);
			//var view = new ContentView(UIColor.LightGray, previewLayer, metadataDelegate);

			previewLayer.MasksToBounds = true;
			previewLayer.VideoGravity = AVCaptureVideoPreviewLayer.GravityResizeAspectFill;
			previewLayer.Frame = UIScreen.MainScreen.Bounds;
			this.View.Layer.AddSublayer(previewLayer);

			metadataDelegate.MetadataFound += (s, e) => {
				session.StopRunning();
				new UIAlertView("Scanned!",e.StringValue, null ,"OK",null).Show();
			};

			session.StartRunning();

		}
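
The MetadataOutputDelegate used above, and its MetadataFound event, are not part of the listing. A minimal sketch that raises the event with the machine-readable code object, which is what makes e.StringValue available to the handler wired up in ViewDidLoad:

using System;
using AVFoundation;

public class MetadataOutputDelegate : AVCaptureMetadataOutputObjectsDelegate
{
    public event EventHandler<AVMetadataMachineReadableCodeObject> MetadataFound = delegate { };

    public override void DidOutputMetadataObjects (AVCaptureMetadataOutput captureOutput,
        AVMetadataObject[] metadataObjects, AVCaptureConnection connection)
    {
        foreach (var metadata in metadataObjects) {
            var readable = metadata as AVMetadataMachineReadableCodeObject;
            if (readable != null)
                MetadataFound (this, readable);
        }
    }
}
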
Example #17
        void setupCaptureSession()
        {
            //Create a device for capturing Barcodes
            var captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaTypes.Video);

            //Configure the device for continuous autofocus and related camera settings
            ConfigureCameraForDevice(captureDevice);
            //Create an input from that device
            var captureInput = AVCaptureDeviceInput.FromDevice(captureDevice);

            //Add the input to the session
            session.AddInput(captureInput);

            //Create a preview layer for the view
            var previewLayer = AVCaptureVideoPreviewLayer.FromSession(session);

            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            previewLayer.Frame        = vie_Preview_cam.Frame;

            //Add the preview layer to the View for the camera uiview
            vie_Preview_cam.Layer.AddSublayer(previewLayer);

            //Assign who's going to handle the metadataoutput
            var metadataoutput = new AVCaptureMetadataOutput();

            //Set delegate
            metadataoutput.SetDelegate(this, CoreFoundation.DispatchQueue.MainQueue);
            //Add the metadataoutput to session
            session.AddOutput(metadataoutput);

            //Assign which type of Codes will be read,
            metadataoutput.MetadataObjectTypes = AVMetadataObjectType.QRCode;

            //Start the Session
            session.StartRunning();
        }
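
ConfigureCameraForDevice is referenced but not defined in this snippet. A sketch of the usual pattern: lock the device for configuration before changing its focus, exposure or white-balance mode, then unlock it again:

        void ConfigureCameraForDevice(AVCaptureDevice device)
        {
            NSError error;
            if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                device.LockForConfiguration(out error);
                device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                device.UnlockForConfiguration();
            }
            else if (device.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
            {
                device.LockForConfiguration(out error);
                device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                device.UnlockForConfiguration();
            }
            else if (device.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
            {
                device.LockForConfiguration(out error);
                device.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
                device.UnlockForConfiguration();
            }
        }
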
Example #18
        public void Defaults()
        {
            using (var obj = new AVCaptureMetadataOutput()) {
                Assert.AreEqual(AVMetadataObjectType.None, obj.AvailableMetadataObjectTypes, "AvailableMetadataObjectTypes");
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");

                Assert.IsNotNull(obj.WeakAvailableMetadataObjectTypes, "WeakAvailableMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakAvailableMetadataObjectTypes.Length, "WeakAvailableMetadataObjectTypes#");
                Assert.IsNotNull(obj.WeakMetadataObjectTypes, "WeakMetadataObjectTypes");
                Assert.AreEqual(0, obj.WeakMetadataObjectTypes.Length, "WeakMetadataObjectTypes#");
                if (TestRuntime.CheckSystemVersion(PlatformName.iOS, 7, 0, throwIfOtherPlatform: false))
                {
                    Assert.AreEqual(new CGRect(0, 0, 1, 1), obj.RectOfInterest, "RectOfInterest");
                }

#if !__MACCATALYST__ // https://github.com/xamarin/maccore/issues/2345
                obj.WeakMetadataObjectTypes = null;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
                obj.MetadataObjectTypes = AVMetadataObjectType.None;
                Assert.AreEqual(AVMetadataObjectType.None, obj.MetadataObjectTypes, "MetadataObjectTypes");
                obj.SetDelegate(null, null);
#endif // !__MACCATALYST__
            }
        }
        bool SetupCaptureSession()
        {
            var availableResolutions = new List <CameraResolution> ();

            var consideredResolutions = new Dictionary <NSString, CameraResolution> {
                { AVCaptureSession.Preset352x288, new CameraResolution   {
                      Width = 352, Height = 288
                  } },
                { AVCaptureSession.PresetMedium, new CameraResolution    {
                      Width = 480, Height = 360
                  } },                                                                                      //480x360
                { AVCaptureSession.Preset640x480, new CameraResolution   {
                      Width = 640, Height = 480
                  } },
                { AVCaptureSession.Preset1280x720, new CameraResolution  {
                      Width = 1280, Height = 720
                  } },
                { AVCaptureSession.Preset1920x1080, new CameraResolution {
                      Width = 1920, Height = 1080
                  } }
            };

            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.Preset640x480
            };

            // create a device input and attach it to the session
//			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
            AVCaptureDevice captureDevice = null;
            var             devices       = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            foreach (var device in devices)
            {
                captureDevice = device;
                if (options.UseFrontCameraIfAvailable.HasValue &&
                    options.UseFrontCameraIfAvailable.Value &&
                    device.Position == AVCaptureDevicePosition.Front)
                {
                    break;                     //Front camera successfully set
                }
                else if (device.Position == AVCaptureDevicePosition.Back && (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
                {
                    break;                     //Back camera successfully set
                }
            }
            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                if (overlayView != null)
                {
                    this.AddSubview(overlayView);
                    this.BringSubviewToFront(overlayView);
                }
                return(false);
            }

            CameraResolution resolution = null;

            // Find resolution
            // Go through the resolutions we can even consider
            foreach (var cr in consideredResolutions)
            {
                // Now check to make sure our selected device supports the resolution
                // so we can add it to the list to pick from
                if (captureDevice.SupportsAVCaptureSessionPreset(cr.Key))
                {
                    availableResolutions.Add(cr.Value);
                }
            }

            resolution = options.GetResolution(availableResolutions);

            // See if the user selected a resolution
            if (resolution != null)
            {
                // Now get the preset string from the resolution chosen
                var preset = (from c in consideredResolutions
                              where c.Value.Width == resolution.Width &&
                              c.Value.Height == resolution.Height
                              select c.Key).FirstOrDefault();

                // If we found a matching preset, let's set it on the session
                if (!string.IsNullOrEmpty(preset))
                {
                    session.SessionPreset = preset;
                }
            }

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                if (overlayView != null)
                {
                    this.AddSubview(overlayView);
                    this.BringSubviewToFront(overlayView);
                }
                return(false);
            }
            else
            {
                session.AddInput(input);
            }


            foundResult = false;
            //Detect barcodes with built in avcapture stuff
            AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput();

            captureDelegate = new CaptureDelegate(metaDataObjects =>
            {
                if (!analyzing)
                {
                    return;
                }

                //Console.WriteLine("Found MetaData Objects");

                var msSinceLastPreview = (DateTime.UtcNow - lastAnalysis).TotalMilliseconds;

                if (msSinceLastPreview < options.DelayBetweenAnalyzingFrames ||
                    (wasScanned && msSinceLastPreview < options.DelayBetweenContinuousScans) ||
                    working)
                //|| CancelTokenSource.IsCancellationRequested)
                {
                    return;
                }

                working      = true;
                wasScanned   = false;
                lastAnalysis = DateTime.UtcNow;

                var mdo = metaDataObjects.FirstOrDefault();

                if (mdo == null)
                {
                    return;
                }

                var readableObj = mdo as AVMetadataMachineReadableCodeObject;

                if (readableObj == null)
                {
                    return;
                }

                wasScanned = true;

                var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());

                var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);

                resultCallback(rs);

                working = false;
            });

            metadataOutput.SetDelegate(captureDelegate, DispatchQueue.MainQueue);
            session.AddOutput(metadataOutput);

            //Setup barcode formats
            if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
            {
                #if __UNIFIED__
                var formats = AVMetadataObjectType.None;

                foreach (var f in ScanningOptions.PossibleFormats)
                {
                    formats |= AVCaptureBarcodeFormatFromZXingBarcodeFormat(f);
                }

                formats &= ~AVMetadataObjectType.None;

                metadataOutput.MetadataObjectTypes = formats;
                #else
                var formats = new List <string> ();

                foreach (var f in ScanningOptions.PossibleFormats)
                {
                    formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f));
                }

                metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray();
                #endif
            }
            else
            {
                metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
            }



            previewLayer = new AVCaptureVideoPreviewLayer(session);

            //Framerate set here (15 fps)
            if (previewLayer.RespondsToSelector(new Selector("connection")))
            {
                if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
                {
                    var perf1 = PerformanceCounter.Start();

                    NSError lockForConfigErr = null;

                    captureDevice.LockForConfiguration(out lockForConfigErr);
                    if (lockForConfigErr == null)
                    {
                        captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
                        captureDevice.UnlockForConfiguration();
                    }

                    PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
                }
                else
                {
                    previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
                }
            }

                        #if __UNIFIED__
            previewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                        #else
            previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
                        #endif
            previewLayer.Frame    = new CGRect(0, 0, this.Frame.Width, this.Frame.Height);
            previewLayer.Position = new CGPoint(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

            layerView = new UIView(new CGRect(0, 0, this.Frame.Width, this.Frame.Height));
            layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
            layerView.Layer.AddSublayer(previewLayer);

            this.AddSubview(layerView);

            ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

            if (overlayView != null)
            {
                this.AddSubview(overlayView);
                this.BringSubviewToFront(overlayView);

                //overlayView.LayoutSubviews ();
            }

            session.StartRunning();

            Console.WriteLine("RUNNING!!!");



            //output.AlwaysDiscardsLateVideoFrames = true;


            Console.WriteLine("SetupCamera Finished");

            //session.AddOutput (output);
            //session.StartRunning ();


            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
            {
                NSError err = null;
                if (captureDevice.LockForConfiguration(out err))
                {
                    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
                    {
                        captureDevice.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
                    }
                    else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.AutoFocus))
                    {
                        captureDevice.FocusMode = AVCaptureFocusMode.AutoFocus;
                    }

                    if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
                    {
                        captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                    }
                    else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
                    {
                        captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
                    }

                    if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
                    {
                        captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
                    }
                    else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
                    {
                        captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
                    }

                    if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
                    {
                        captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
                    }

                    if (captureDevice.FocusPointOfInterestSupported)
                    {
                        captureDevice.FocusPointOfInterest = new CGPoint(0.5f, 0.5f);
                    }

                    if (captureDevice.ExposurePointOfInterestSupported)
                    {
                        captureDevice.ExposurePointOfInterest = new CGPoint(0.5f, 0.5f);
                    }

                    captureDevice.UnlockForConfiguration();
                }
                else
                {
                    Console.WriteLine("Failed to Lock for Config: " + err.Description);
                }
            }

            return(true);
        }
Example #20
            public override void ViewDidLoad()
            {
                base.ViewDidLoad();
                View.BackgroundColor = UIColor.Black;

                NSError error = null;

                _captureSession = new AVCaptureSession();
                CameraMetaDataDelegate del = null;

                var             authStatus    = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
                AVCaptureDevice captureDevice = null;

                // check authorization status
                if (authStatus == AVAuthorizationStatus.Authorized)
                {
                    captureDevice = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video); // update for iOS 13
                }
                else if (authStatus == AVAuthorizationStatus.NotDetermined)
                {
                    AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, (granted) =>
                    {
                        if (!granted)
                        {
                            iApp.Log.Error("ViewDidLoadBase ScanLayer RequestAccessForMediaType not granted!");
                        }
                        else
                        {
                            iApp.Log.Error("ViewDidLoadBase ScanLayer RequestAccessForMediaType granted!");
                        }
                    });
                }
                else
                {
                    iApp.Log.Error("Not Authorized! Status: " + authStatus.ToString());
                }
                if (captureDevice != null)
                {
                    var videoInput = AVCaptureDeviceInput.FromDevice(captureDevice, out error);
                    if (videoInput != null)
                    {
                        _captureSession.AddInput(videoInput);
                    }
                    else
                    {
                        iApp.Log.Error("Video capture error: " + error.LocalizedDescription);
                    }

                    var metaDataOutput = new AVCaptureMetadataOutput();
                    _captureSession.AddOutput(metaDataOutput);

                    del = new CameraMetaDataDelegate(this, _layer);
                    metaDataOutput.SetDelegate(del, CoreFoundation.DispatchQueue.MainQueue);

                    //metaDataOutput.MetadataObjectTypes = metaDataOutput.AvailableMetadataObjectTypes;

                    metaDataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode | AVMetadataObjectType.Code128Code | AVMetadataObjectType.UPCECode | AVMetadataObjectType.EAN13Code;


                    _videoPreviewLayer = new AVCaptureVideoPreviewLayer(_captureSession)
                    {
                        Frame       = View.Bounds,
                        Orientation = (AVCaptureVideoOrientation)InterfaceOrientation,
                    };
                    View.Layer.AddSublayer(_videoPreviewLayer);
                    var image = TouchStyle.ImageFromResource("barcode-overlay-sm.png");
                    _imageOverlay = new UIImageView(image)
                    {
                        Frame            = View.Frame,
                        ContentMode      = UIViewContentMode.Center,
                        AutoresizingMask = UIViewAutoresizing.FlexibleMargins,
                    };
                    View.Add(_imageOverlay);

                    // preload this, and display when scan event occurs
                    var imageScanBlocked = TouchStyle.ImageFromResource("barcode-scanblocked-sm.png");
                    _imageOverlayScanBlocked = new UIImageView(imageScanBlocked)
                    {
                        Frame            = View.Frame,
                        ContentMode      = UIViewContentMode.Center,
                        AutoresizingMask = UIViewAutoresizing.FlexibleMargins,
                        Hidden           = true,
                    };
                    View.Add(_imageOverlayScanBlocked);
                }
                else
                {
                    //TODO: Add "Scanner currently not active overlay Image"
                    iApp.Log.Error("null capture device!");
                }

                nfloat startVerticalLoc = UIScreen.MainScreen.Bounds.Height - LastValueScanOverlay.ViewHeight;

                _lastScanOverlay = new LastValueScanOverlay(startVerticalLoc, _layerFont);
                View.Add(_lastScanOverlay);

                NavigationItem.LeftBarButtonItem = new UIBarButtonItem("Done", UIBarButtonItemStyle.Done, delegate {
                    string scannedBarcodes = string.Empty;
                    if (del != null && del.Buffer != null && del.Buffer.CurrentBuffer != null)
                    {
                        foreach (var s in del.Buffer.CurrentBuffer)
                        {
                            scannedBarcodes += s + "\r\n";
                        }
                    }
                    if (_callback.Parameters == null)
                    {
                        _callback.Parameters = new Dictionary <string, string>();
                    }
                    _callback.Parameters[_barcodeValueKey] = scannedBarcodes;
                    iApp.Navigate(_callback);
                    ModalManager.EnqueueModalTransition(TouchFactory.Instance.TopViewController, null, true);
                });
            }
        /// <summary>
        /// Initializes the scanner.
        /// </summary>
        /// <param name="barcodeFormat">Barcode formats to recognize</param>
        /// <param name="sessionPreset">Capture resolution (session preset)</param>
        /// <returns></returns>
        private bool InitScanner(AVMetadataObjectType barcodeFormat, NSString sessionPreset)
        {
            // Check for camera access permission
            if (!IsCameraAuthorized)
            {
                this.Write("Camera access has not been granted.");
                return(false);
            }

            // Use the rear camera as the capture device
            Device = AVCaptureDevice
                     .DevicesWithMediaType(AVMediaType.Video)
                     .FirstOrDefault(e => e.Position == AVCaptureDevicePosition.Back);
            if (Device == null)
            {
                this.Write("No rear camera is available.");
                return(false);
            }

            // Configure the input
            Input = AVCaptureDeviceInput.FromDevice(Device);
            if (Input == null)
            {
                this.Write("AVCaptureDeviceInput is null.");
                return(false);
            }

            // Configure the output
            CaptureDelegate = new CaptureDelegate((metadataObjects) =>
            {
                if (BarcodeDetected == null)
                {
                    return;
                }

                foreach (var metadata in metadataObjects)
                {
                    var data = ((AVMetadataMachineReadableCodeObject)metadata).StringValue;
                    BarcodeDetected?.Invoke(new BarcodeData(metadata.Type.ToTmonFormat(), data));
                }
            });
            Output = new AVCaptureMetadataOutput();
            Output.SetDelegate(CaptureDelegate, DispatchQueue.MainQueue);

            // Configure the session
            Session = new AVCaptureSession()
            {
                SessionPreset = sessionPreset,
            };
            Session.AddInput(Input);
            Session.AddOutput(Output);

            // Set the barcode formats to detect (important: this must come after the session is configured)
            Output.MetadataObjectTypes = barcodeFormat;

            // Configure the previewer
            Previewer              = AVCaptureVideoPreviewLayer.FromSession(Session);
            Previewer.Frame        = CGRect.Empty;
            Previewer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            Previewer.Connection.VideoOrientation = DeviceOrientation;

            return(true);
        }
        bool SetupCaptureSession()
        {
            // configure the capture session for low resolution, change this if your code
            // can cope with more data or volume
            session = new AVCaptureSession()
            {
                SessionPreset = AVCaptureSession.Preset640x480
            };

            // create a device input and attach it to the session
//			var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType (AVMediaType.Video);
            AVCaptureDevice captureDevice = null;
            var             devices       = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);

            foreach (var device in devices)
            {
                captureDevice = device;
                if (options.UseFrontCameraIfAvailable.HasValue &&
                    options.UseFrontCameraIfAvailable.Value &&
                    device.Position == AVCaptureDevicePosition.Front)
                {
                    break;                     //Front camera successfully set
                }
                else if (device.Position == AVCaptureDevicePosition.Back && (!options.UseFrontCameraIfAvailable.HasValue || !options.UseFrontCameraIfAvailable.Value))
                {
                    break;                     //Back camera successfully set
                }
            }
            if (captureDevice == null)
            {
                Console.WriteLine("No captureDevice - this won't work on the simulator, try a physical device");
                if (overlayView != null)
                {
                    this.AddSubview(overlayView);
                    this.BringSubviewToFront(overlayView);
                }
                return(false);
            }

            var input = AVCaptureDeviceInput.FromDevice(captureDevice);

            if (input == null)
            {
                Console.WriteLine("No input - this won't work on the simulator, try a physical device");
                if (overlayView != null)
                {
                    this.AddSubview(overlayView);
                    this.BringSubviewToFront(overlayView);
                }
                return(false);
            }
            else
            {
                session.AddInput(input);
            }


            foundResult = false;
            //Detect barcodes with built in avcapture stuff
            AVCaptureMetadataOutput metadataOutput = new AVCaptureMetadataOutput();

            var dg = new CaptureDelegate(metaDataObjects =>
            {
                if (foundResult)
                {
                    return;
                }

                //Console.WriteLine("Found MetaData Objects");

                var mdo = metaDataObjects.FirstOrDefault();

                if (mdo == null)
                {
                    return;
                }

                var readableObj = mdo as AVMetadataMachineReadableCodeObject;

                if (readableObj == null)
                {
                    return;
                }

                foundResult = true;

                //Console.WriteLine("Barcode: " + readableObj.StringValue);

                var zxingFormat = ZXingBarcodeFormatFromAVCaptureBarcodeFormat(readableObj.Type.ToString());

                var rs = new ZXing.Result(readableObj.StringValue, null, null, zxingFormat);

                resultCallback(rs);
            });

            metadataOutput.SetDelegate(dg, MonoTouch.CoreFoundation.DispatchQueue.MainQueue);
            session.AddOutput(metadataOutput);

            //Setup barcode formats
            if (ScanningOptions.PossibleFormats != null && ScanningOptions.PossibleFormats.Count > 0)
            {
                var formats = new List <string> ();

                foreach (var f in ScanningOptions.PossibleFormats)
                {
                    formats.AddRange(AVCaptureBarcodeFormatFromZXingBarcodeFormat(f));
                }

                metadataOutput.MetadataObjectTypes = (from f in formats.Distinct() select new NSString(f)).ToArray();
            }
            else
            {
                metadataOutput.MetadataObjectTypes = metadataOutput.AvailableMetadataObjectTypes;
            }



            previewLayer = new AVCaptureVideoPreviewLayer(session);

            //Framerate set here (15 fps)
            if (previewLayer.RespondsToSelector(new Selector("connection")))
            {
                if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
                {
                    var perf1 = PerformanceCounter.Start();

                    NSError lockForConfigErr = null;

                    captureDevice.LockForConfiguration(out lockForConfigErr);
                    if (lockForConfigErr == null)
                    {
                        captureDevice.ActiveVideoMinFrameDuration = new CMTime(1, 10);
                        captureDevice.UnlockForConfiguration();
                    }

                    PerformanceCounter.Stop(perf1, "PERF: ActiveVideoMinFrameDuration Took {0} ms");
                }
                else
                {
                    previewLayer.Connection.VideoMinFrameDuration = new CMTime(1, 10);
                }
            }

            previewLayer.LayerVideoGravity = AVLayerVideoGravity.ResizeAspectFill;
            previewLayer.Frame             = new RectangleF(0, 0, this.Frame.Width, this.Frame.Height);
            previewLayer.Position          = new PointF(this.Layer.Bounds.Width / 2, (this.Layer.Bounds.Height / 2));

            layerView = new UIView(new RectangleF(0, 0, this.Frame.Width, this.Frame.Height));
            layerView.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
            layerView.Layer.AddSublayer(previewLayer);

            this.AddSubview(layerView);

            ResizePreview(UIApplication.SharedApplication.StatusBarOrientation);

            if (overlayView != null)
            {
                this.AddSubview(overlayView);
                this.BringSubviewToFront(overlayView);

                //overlayView.LayoutSubviews ();
            }

            session.StartRunning();

            Console.WriteLine("SetupCamera Finished");

            //NOTE: gating on continuous autofocus support below means the plain AutoFocus fallback
            //can never be reached, and the exposure/white balance tweaks are skipped entirely on
            //devices without continuous autofocus.
            if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
            {
                NSError err = null;
                if (captureDevice.LockForConfiguration(out err))
                {
                    if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeContinuousAutoFocus))
                    {
                        captureDevice.FocusMode = AVCaptureFocusMode.ModeContinuousAutoFocus;
                    }
                    else if (captureDevice.IsFocusModeSupported(AVCaptureFocusMode.ModeAutoFocus))
                    {
                        captureDevice.FocusMode = AVCaptureFocusMode.ModeAutoFocus;
                    }

                    if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
                    {
                        captureDevice.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
                    }
                    else if (captureDevice.IsExposureModeSupported(AVCaptureExposureMode.AutoExpose))
                    {
                        captureDevice.ExposureMode = AVCaptureExposureMode.AutoExpose;
                    }

                    if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
                    {
                        captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
                    }
                    else if (captureDevice.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.AutoWhiteBalance))
                    {
                        captureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.AutoWhiteBalance;
                    }

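                    // Near-range focus restriction (iOS 7+) hints that subjects sit close to the
                    // lens, which typically speeds up autofocus on barcodes.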
                    if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0) && captureDevice.AutoFocusRangeRestrictionSupported)
                    {
                        captureDevice.AutoFocusRangeRestriction = AVCaptureAutoFocusRangeRestriction.Near;
                    }

                    if (captureDevice.FocusPointOfInterestSupported)
                    {
                        captureDevice.FocusPointOfInterest = new PointF(0.5f, 0.5f);
                    }

                    if (captureDevice.ExposurePointOfInterestSupported)
                    {
                        captureDevice.ExposurePointOfInterest = new PointF(0.5f, 0.5f);
                    }

                    captureDevice.UnlockForConfiguration();
                }
                else
                {
                    Console.WriteLine("Failed to Lock for Config: " + err.Description);
                }
            }

            return(true);
        }
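The two format-mapping helpers referenced above, AVCaptureBarcodeFormatFromZXingBarcodeFormat and ZXingBarcodeFormatFromAVCaptureBarcodeFormat, are not included in this snippet. A minimal sketch of how such a mapping might look (illustrative only, covering just a few symbologies and assuming the reverse-DNS identifier strings behind the AVMetadataObjectType constants, plus System.Collections.Generic and ZXing.Net) is:

        // Illustrative sketch only - not the library's actual implementation.
        static List<string> AVCaptureBarcodeFormatFromZXingBarcodeFormat(ZXing.BarcodeFormat format)
        {
            var types = new List<string>();
            switch (format)
            {
                case ZXing.BarcodeFormat.QR_CODE:  types.Add("org.iso.QRCode");  break;
                case ZXing.BarcodeFormat.CODE_128: types.Add("org.iso.Code128"); break;
                case ZXing.BarcodeFormat.EAN_13:   types.Add("org.gs1.EAN-13");  break;
                // ...remaining symbologies would be mapped the same way
            }
            return types;
        }

        static ZXing.BarcodeFormat ZXingBarcodeFormatFromAVCaptureBarcodeFormat(string avType)
        {
            // readableObj.Type.ToString() can yield either the enum name ("QRCode") or the
            // reverse-DNS identifier ("org.iso.QRCode"), so match on the tail of the string.
            if (avType.EndsWith("QRCode"))
                return ZXing.BarcodeFormat.QR_CODE;
            if (avType.EndsWith("Code128") || avType.EndsWith("Code128Code"))
                return ZXing.BarcodeFormat.CODE_128;
            if (avType.Contains("EAN-13") || avType.EndsWith("EAN13Code"))
                return ZXing.BarcodeFormat.EAN_13;
            return ZXing.BarcodeFormat.QR_CODE; // fallback for unmapped types
        }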
Exemple #25
        public override void ViewDidLoad()
        {
            base.ViewDidLoad();
            this.View.BackgroundColor = UIColor.White;
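            // On iOS 7 and later the view extends under the 20 pt status bar, so the overlay and
            // camera frames are pushed down by v.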
            int v = 0;

            if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
            {
                v = 20;
            }
            nfloat  height = this.View.Frame.Height / 2 - v;
            CGRect  frame  = new CGRect(0, v, this.View.Frame.Width, height);
            UIImage image  = TouchStyle.ImageFromResource("barcode-overlay-sm.png");

            this._imageOverlay = new UIImageView(image)
            {
                Frame            = frame,
                ContentMode      = UIViewContentMode.Center,
                AutoresizingMask = (UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight | UIViewAutoresizing.FlexibleBottomMargin)
            };
            this.View.Add(this._imageOverlay);
            UIImage image2 = TouchStyle.ImageFromResource("barcode-scanblocked-sm.png");

            this._imageOverlayScanBlocked = new UIImageView(image2)
            {
                Frame            = frame,
                ContentMode      = UIViewContentMode.Center,
                AutoresizingMask = (UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight | UIViewAutoresizing.FlexibleBottomMargin),
                Hidden           = true
            };
            this.View.Add(this._imageOverlayScanBlocked);
            this._cameraView = new UIView
            {
                AutoresizingMask = (UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight | UIViewAutoresizing.FlexibleBottomMargin)
            };
            this._cameraView.Frame = frame;

            AVAuthorizationStatus authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);

            if (authorizationStatus == AVAuthorizationStatus.Authorized)
            {
                AVCaptureDevice      device = AVCaptureDevice.GetDefaultDevice(AVMediaType.Video); // GetDefaultDevice replaces the deprecated DefaultDeviceWithMediaType
                NSError              nSError;
                AVCaptureDeviceInput aVCaptureDeviceInput = AVCaptureDeviceInput.FromDevice(device, out nSError);
                if (aVCaptureDeviceInput != null)
                {
                    this._captureSession = new AVCaptureSession();
                    this._captureSession.AddInput(aVCaptureDeviceInput);
                    AVCaptureMetadataOutput aVCaptureMetadataOutput = new AVCaptureMetadataOutput();
                    this._captureSession.AddOutput(aVCaptureMetadataOutput);
                    this._cameraMetaDataDelegate = new CameraScannerSplitView.CameraMetaDataDelegate(this);
                    aVCaptureMetadataOutput.SetDelegate(this._cameraMetaDataDelegate, DispatchQueue.MainQueue);
                    //aVCaptureMetadataOutput.MetadataObjectTypes = aVCaptureMetadataOutput.AvailableMetadataObjectTypes;
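                    // Limiting MetadataObjectTypes to the symbologies actually needed is cheaper than
                    // scanning every AvailableMetadataObjectTypes on each frame.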
                    aVCaptureMetadataOutput.MetadataObjectTypes = AVMetadataObjectType.QRCode | AVMetadataObjectType.Code128Code | AVMetadataObjectType.UPCECode | AVMetadataObjectType.EAN13Code;
                }
            }
            else if (authorizationStatus == AVAuthorizationStatus.NotDetermined)
            {
                AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, (granted) =>
                {
                    if (!granted)
                    {
                        Device.Log.Error("ViewDidLoadBase ScanLayer RequestAccessForMediaType not granted!");
                    }
                    else
                    {
                        Device.Log.Error("ViewDidLoadBase ScanLayer RequestAccessForMediaType granted!");
                    }
                });
            }
            else
            {
                Device.Log.Error("Not Authorized! Status: " + authorizationStatus.ToString());
            }


            // The authorization status is evaluated a second time here; for NotDetermined this issues
            // a second RequestAccessForMediaType call whose callback actually drives the UI setup.
            if (authorizationStatus >= AVAuthorizationStatus.NotDetermined && authorizationStatus <= AVAuthorizationStatus.Authorized)
            {
                // (int)authorizationStatus: 0 = NotDetermined, 1 = Restricted, 2 = Denied, 3 = Authorized
                switch ((int)authorizationStatus)
                {
                case 0:
                    AVCaptureDevice.RequestAccessForMediaType(AVMediaType.Video, delegate(bool result)
                    {
                        Device.Thread.ExecuteOnMainThread(delegate
                        {
                            if (result)
                            {
                                this.SetupVideoPreviewLayer();
                            }
                            else
                            {
                                this.AddNoCameraAccessLabels();
                            }
                        });
                    });
                    break;

                case 1:
                    Device.Log.Warn("Camera Access is restricted", new object[0]);
                    this.AddNoCameraAccessLabels();
                    break;

                case 2:
                    this.AddNoCameraAccessLabels();
                    break;

                case 3:
                    this.SetupVideoPreviewLayer();
                    break;
                }
            }
            this.View.InsertSubviewBelow(this._cameraView, this._imageOverlay);
            CGRect frame2 = new CGRect(0, frame.Bottom, this.View.Frame.Width, this.View.Frame.Height - frame.Height);

            this._resultsView = new UITableView(frame2, UITableViewStyle.Plain)
            {
                AutoresizingMask = (UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleTopMargin | UIViewAutoresizing.FlexibleHeight)
            };
            this._resultsView.Source = new CameraScannerSplitView.CameraListSource(this);
            this.View.Add(this._resultsView);
            this.NavigationItem.LeftBarButtonItem = new UIBarButtonItem("Done", UIBarButtonItemStyle.Done, delegate
            {
                string text = string.Empty;
                if (this._resultsView.Source != null)
                {
                    try
                    {
                        string text2       = string.Empty;
                        List <string> list = (this._resultsView.Source as CameraScannerSplitView.CameraListSource).ScannedBarcodes();
                        foreach (string current in list)
                        {
                            if (!string.IsNullOrEmpty(current))
                            {
                                text2 = text2 + current + "\r\n";
                            }
                        }
                        text = text2;
                    }
                    catch (Exception arg)
                    {
                        Device.Log.Error("This error occurred while parsing barcodes scanned: \r\n" + arg, new object[0]);
                    }
                }
                if (this._callback.Parameters == null)
                {
                    this._callback.Parameters = new Dictionary <string, string>();
                }
                this._callback.Parameters[this._barcodeValueKey] = text;
                iApp.Navigate(this._callback);
                ModalManager.EnqueueModalTransition(TouchFactory.Instance.TopViewController, null, true);
            });
        }
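The CameraMetaDataDelegate wired up in ViewDidLoad is not shown in this example. A minimal sketch of such a delegate, assuming it is nested inside CameraScannerSplitView and that the hosting controller exposes a hypothetical OnBarcodeScanned(string) method which feeds the results table, might look like this:

        // Illustrative sketch; the owner type and OnBarcodeScanned are assumptions, not the real API.
        class CameraMetaDataDelegate : AVCaptureMetadataOutputObjectsDelegate
        {
            readonly CameraScannerSplitView _controller; // hypothetical hosting controller

            public CameraMetaDataDelegate(CameraScannerSplitView controller)
            {
                _controller = controller;
            }

            public override void DidOutputMetadataObjects(AVCaptureMetadataOutput captureOutput,
                                                          AVMetadataObject[] metadataObjects,
                                                          AVCaptureConnection connection)
            {
                foreach (var obj in metadataObjects)
                {
                    var readable = obj as AVMetadataMachineReadableCodeObject;
                    if (readable == null || string.IsNullOrEmpty(readable.StringValue))
                        continue;

                    // Delivered on DispatchQueue.MainQueue (see SetDelegate above), so UIKit
                    // state such as the results table can be updated directly.
                    _controller.OnBarcodeScanned(readable.StringValue); // hypothetical callback
                }
            }
        }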